# taskarg: ${fileDirname}

import os
import json
import shutil
import base64
from dotenv import load_dotenv
from gidappdata.utility.functions import pathmaker, writebin, writeit, writejson, readbin, readit
from gidappdata.utility.exceptions import IsDuplicateNameError
from time import sleep, time
import gidlogger as glog
import click
import logging
import glob
from tempfile import TemporaryDirectory
import checksumdir

log = logging.getLogger('gidappdata')

log.info(glog.NEWRUN())

def as_kb(in_size: int) -> int:
    """
    Converter function to convert a size in bytes to kilobytes.
    """
    conv = 1024
    return in_size // conv


def as_mb(in_size: int) -> int:
    """
    Converter function to convert a size in bytes to megabytes.
    """
    conv = 1024 * 1024
    return in_size // conv


def as_gb(in_size: int) -> int:
    """
    Converter function to convert a size in bytes to gigabytes.
    """
    conv = 1024 * 1024 * 1024
    return in_size // conv
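
# Illustrative example (not part of the original module): with the integer
# division used above, a 5_000_000 byte file converts to
#   as_kb(5_000_000) -> 4882
#   as_mb(5_000_000) -> 4
#   as_gb(5_000_000) -> 0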

def find_files():
    # sourcery skip: inline-immediately-returned-variable, list-comprehension
    """
    Collect the paths of all non-Python, non-directory entries in the current working directory.
    """
    _out = []
    for _file in os.scandir():
        if not _file.name.endswith('.py') and not os.path.isdir(_file.path):
            _out.append(_file.path)
    return _out


def pack_data(in_path):
    """
    Zip the 'data_pack' folder inside `in_path` and return the path of the archive.
    """
    _folder = pathmaker(in_path, 'data_pack')
    archive_path = pathmaker(shutil.make_archive(pathmaker(in_path, 'base_userdata_archive'), format='zip', root_dir=_folder, logger=log))
    log.info('data was archived with size of %s%s', *convert_file_size(archive_path))
    return archive_path
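
# Illustrative sketch (folder layout assumed from the calls above and below): given
#   init_userdata/
#       data_pack/...
# pack_data('init_userdata') zips the contents of 'data_pack' into
# 'init_userdata/base_userdata_archive.zip' and returns that (pathmaker-normalized) path.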

def convert_to_bin(archive, use_base64):
    """
    Read the archive as raw bytes, optionally base64-encoding the content.
    """
    with open(archive, 'rb') as binf:
        _content = binf.read()
    if use_base64 is True:
        _content = base64.b64encode(_content)
    return _content


def write_to_pyfile(in_path, **kwargs):
    """
    Write every keyword argument as a `name = value` assignment to 'bin_data.py'.
    """
    _path = pathmaker(in_path, 'bin_data.py')
    with open(_path, 'w') as _file:
        for key, value in kwargs.items():
            # bytes values are written via their repr (b'...'), which is a valid Python literal
            _file.write(f'{key.strip()} = {value.strip()}\n\n')
    log.info("bin data was written to python file: '%s'", _path)
    return _path
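
# Illustrative example (hypothetical bytes): a call like
#   write_to_pyfile('init_userdata', bin_archive_data=b'PK\x03\x04...')
# results in an 'init_userdata/bin_data.py' containing roughly
#   bin_archive_data = b'PK\x03\x04...'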

def write_construction_info(in_path, appname, author='BrocaProgs', uses_base64=True, in_hash=None):
    """
    Write the metadata needed to reconstruct the user data to 'construction_info.env'.
    """
    _path = pathmaker(in_path, 'construction_info.env')
    with open(_path, 'w') as confo_file:
        confo_file.write(f"USES_BASE64 = {str(uses_base64)}\n")
        confo_file.write(f"AUTHOR_NAME = {str(author)}\n")
        confo_file.write(f"APP_NAME = {str(appname)}\n")
        if in_hash is not None:
            # key name is an assumption; the original signature lacked the `in_hash`
            # parameter even though generate_user_data_binfile passes it
            confo_file.write(f"DATA_HASH = {str(in_hash)}\n")

    log.info("construction info file was written to: '%s'", _path)
    return _path
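
# Illustrative example (hypothetical values): the resulting construction_info.env
# looks roughly like
#   USES_BASE64 = True
#   AUTHOR_NAME = Brocaprogs
#   APP_NAME = My-App
#   DATA_HASH = 83c2...   (only written when in_hash is passed; key name assumed above)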

def convert_file_size(in_file_path):
    """
    Return the file size as a (value, unit) tuple, using the largest fitting unit.
    """
    size = os.stat(in_file_path).st_size
    if as_gb(size) > 1:
        log_size = round(as_gb(size), 3)
        log_size_type = 'gb'
    elif as_mb(size) > 1:
        log_size = round(as_mb(size), 3)
        log_size_type = 'mb'
    elif as_kb(size) > 1:
        log_size = round(as_kb(size), 1)
        log_size_type = 'kb'
    else:
        log_size = size
        log_size_type = 'b'
    return log_size, log_size_type
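
# Illustrative example: with the integer converters above, a 5_000_000 byte file
# yields (4, 'mb') and a 900 byte file yields (900, 'b').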

def post_clean_zip_file(file_path):
    log.info("starting cleanup of zip file")
    os.remove(file_path)
    if os.path.isfile(file_path) is False:
        log.info("cleanup of zip file successfully done")
    else:
        log.critical("was not able to remove zip file '%s'", file_path)


def read_const_file(const_file):
    _out = []
    with open(const_file, 'r') as c_file:
        content = c_file.read().splitlines()
        for line in content:
            if line != '' and ' = ' in line:
                _name, _value = line.split(' = ')
                _out.append((_name, _value))
    return _out
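
# Illustrative example: for a construction_info.env like the one sketched above,
# read_const_file returns
#   [('USES_BASE64', 'True'), ('AUTHOR_NAME', 'Brocaprogs'), ('APP_NAME', 'My-App')]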

def post_checks(bin_py_file, const_file):

    log.info('checking existence of binary pyfile:')

    if os.path.exists(bin_py_file) is True:
        log.info("binary pyfile does exist at '%s'", bin_py_file)
        log.info("--> size: %s%s", *convert_file_size(bin_py_file))
    else:
        log.critical("binary pyfile does NOT exist! should be at '%s'", bin_py_file)

    log.info('checking existence of construction info file:')
    if os.path.exists(const_file) is True:
        log.info("construction info file does exist at '%s'", const_file)
        log.info("set variables found in construction info file: ")
        log.info("-----------------------")
        for name, value in read_const_file(const_file):
            log.info("%s = '%s'", name, value)
        log.info("-----------------------")
    else:
        log.critical("construction info file does NOT exist! should be at '%s'", const_file)


def get_ignore_pattern_files(base_folder, pattern, file_list):
    for file in glob.iglob(base_folder + '/**/' + pattern, recursive=True):
        file_list.append(pathmaker(os.path.abspath(file)))


def temporary_move_out(filelist, tempdir):
    _out = []
    for file in filelist:
        print(f'moving out "{os.path.basename(file)}"')
        new_path = pathmaker(tempdir, os.path.basename(file))

        _out.append((file, new_path))
        shutil.move(file, new_path)
    return _out


def move_back(filelist):
    for file in filelist:
        shutil.move(file[1], file[0])


def duplicate_check(in_dir):
    collected_folder = []
    collected_files = []
    for dirname, folderlist, filelist in os.walk(in_dir):
        for folder in folderlist:
            if folder not in collected_folder:
                collected_folder.append(folder)
            else:
                raise IsDuplicateNameError(folder, pathmaker(dirname, folder), False)
        for file in filelist:
            if file not in collected_files:
                collected_files.append(file)
            else:
                raise IsDuplicateNameError(file, pathmaker(dirname, file), True)


@click.command()
@click.argument('init_userdata_dir')
@click.option('-n', '--appname', default=os.getenv('PROJECT_NAME'))
@click.option('-a', '--author', default='BrocaProgs')
@click.option('--use-base64/--dont-base64', '-64/-no64', default=True)
@click.option('--clean-zip-file/--keep-zip-file', '-cz/-kz', default=True)
@click.option('--ignore-pattern', '-i', multiple=True)
def cli_generate_user_data_binfile(init_userdata_dir, appname, author, use_base64, clean_zip_file, ignore_pattern):

    start_time = time()
    # validate appname before any files are moved out, so an early return
    # cannot strand ignored files in the temporary directory
    if appname is None or appname == '':
        print('Unable to obtain "appname" from env variable, please set "PROJECT_NAME" env variable or provide appname as cli-option')
        return
    duplicate_check(init_userdata_dir)
    tempdir = TemporaryDirectory()
    tempdir_name = tempdir.name
    ignore_files = []
    for pattern in ignore_pattern:
        get_ignore_pattern_files(init_userdata_dir, pattern, ignore_files)
    moved_files = temporary_move_out(ignore_files, tempdir_name)
    appname = appname.replace(' ', '-').replace('_', '-').title()
    author = author.replace(' ', '-').replace('_', '-').title()
    log.info("Starting conversion for data_pack in '%s'", init_userdata_dir)

    _archive = pack_data(init_userdata_dir)

    log.info('converted archive to bin')

    _py_file = write_to_pyfile(init_userdata_dir, bin_archive_data=convert_to_bin(_archive, use_base64))

    _const_file = write_construction_info(in_path=init_userdata_dir, appname=appname, author=author, uses_base64=use_base64)

    if clean_zip_file is True:
        post_clean_zip_file(_archive)

    log.info('running post-checks')
    post_checks(_py_file, _const_file)
    move_back(moved_files)
    tempdir.cleanup()

    log.debug('overall time taken: %s seconds', str(round(time() - start_time, 3)))
    log.info('---done---')
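
# Illustrative invocation (script name is hypothetical; the command may also be
# exposed as a console-script entry point):
#   python generate_binfile.py ./init_userdata -n "My App" -a "Jane Doe" -i "*.log" -i "__pycache__"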

def generate_user_data_binfile(init_userdata_dir, appname, author, use_base64=True, clean_zip_file=True, ignore_pattern=None):
    start_time = time()
    # validate appname before any files are moved out, so an early return
    # cannot strand ignored files in the temporary directory
    if appname is None or appname == '':
        print('Unable to generate binfile, "appname" must not be empty')
        return
    duplicate_check(init_userdata_dir)
    tempdir = TemporaryDirectory()
    tempdir_name = tempdir.name
    ignore_files = []
    for pattern in ignore_pattern or []:
        get_ignore_pattern_files(init_userdata_dir, pattern, ignore_files)
    moved_files = temporary_move_out(ignore_files, tempdir_name)
    appname = appname.replace(' ', '-').replace('_', '-').title()
    log.info("Starting conversion for data_pack in '%s'", init_userdata_dir)
    checksum_userdata = checksumdir.dirhash(pathmaker(init_userdata_dir, 'data_pack'))
    _archive = pack_data(init_userdata_dir)

    log.info('converted archive to bin')
    bin_data = convert_to_bin(_archive, use_base64)
    _py_file = write_to_pyfile(init_userdata_dir, bin_archive_data=bin_data)

    _const_file = write_construction_info(in_path=init_userdata_dir, in_hash=checksum_userdata, appname=appname, author=author, uses_base64=use_base64)

    if clean_zip_file is True:
        post_clean_zip_file(_archive)

    log.info('running post-checks')
    post_checks(_py_file, _const_file)
    move_back(moved_files)
    tempdir.cleanup()
    log.debug('overall time taken: %s seconds', str(round(time() - start_time, 3)))
    log.info('---done---')
    return bin_data
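
# Illustrative programmatic use (paths and values are hypothetical):
#   bin_data = generate_user_data_binfile('./init_userdata', appname='My App',
#                                         author='Jane Doe', ignore_pattern=['*.log'])
# writes bin_data.py and construction_info.env into './init_userdata' and returns
# the (optionally base64-encoded) archive bytes.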


if __name__ == '__main__':
    # THIS_FILE_DIR = os.path.abspath(os.path.dirname(__file__))
    # _archive = pack_data(r"D:\Dropbox\hobby\Modding\Programs\Github\My_Repos\GidAppData\gidappdata\data\skeletons\prebuilt_standard\basic")
    # _py_file = write_to_pyfile(THIS_FILE_DIR, bin_archive_data=convert_to_bin(_archive, True))
    pass