build-datapack.py
import os
import sys
import glob
import json
import logging
import zipfile
import shutil
from zipfile import ZipFile
from urllib.request import urlretrieve
from tempfile import TemporaryDirectory
logging.basicConfig(level=logging.WARNING, stream=sys.stdout)
logger = logging.getLogger()
FILE_NAME = 'dependencies.json'
IGNORE_LIST = [FILE_NAME, '.git', '.github', '.gitignore']
HELP_TEXT = """
build-datapack.py will install dependencies into a datapack based on a provided
dependencies.json file (see git repo for reference). This file goes in the
same folder as your pack.mcmeta.
Usage:
build-datapack.py <path> <flags...>
Path:
Accepts top-level datapack path, or path to dependencies.json
Flags:
-h --help Prints this message.
-c --clean Removes installed dependencies without re-installing them.
It's recommended to run this before removing a dependency.
-t --no-tags Skips appending function tags step.
-u --upgrade <version> For use in versioned libraries. Finds & replaces the version
                       string from dependencies.json across the pack's file names and
                       contents, then records the new version in dependencies.json.
-o --output <dir> Outputs build to specified directory instead of building
in-place. Prevents modifying any source code.
"""
def get_dependencies(dependencies, temp_dir):
"""
Copies dependency files into the directory, downloading from a url
if needed and extracts zip files.
    :param dependencies: list of dependencies to install, from dependencies.json
    :param temp_dir: temporary working directory to copy and extract into
    """
for dependency in dependencies:
dst_path = f'{temp_dir}/{dependency["name"]}'
dst_zip = dst_path+'.zip'
if 'local' in dependency:
src_path = dependency['local']
if os.path.isdir(src_path):
shutil.copytree(src_path, dst_path, ignore=shutil.ignore_patterns(*IGNORE_LIST))
elif zipfile.is_zipfile(src_path):
shutil.copyfile(src_path, dst_zip)
else:
logger.warning("Unable to locate local file " + src_path)
elif 'url' in dependency:
url = dependency["url"]
extensions = ['']
success = False
if 'github.com' in url and not url.endswith('.zip'):
extensions.append('/archive/master.zip')
extensions.append('/archive/main.zip')
            for ext in extensions:
                logger.info(f'Attempting to retrieve {dependency["name"]} from {url + ext}')
                try:
                    urlretrieve(url + ext, dst_zip)
                except OSError:
                    # e.g. a 404 on /archive/master.zip; fall through to the next candidate
                    continue
                if zipfile.is_zipfile(dst_zip):
                    success = True
                    break
if not success:
raise RuntimeError(f'Failed to retrieve dependency {dependency["name"]}. Check the url and try again.')
else:
            raise RuntimeError(f'{dependency["name"]} does not have a url or local field.')
if zipfile.is_zipfile(dst_zip):
with ZipFile(dst_zip) as f:
logger.info(f'extracting {dst_zip}')
f.extractall(dst_path)
def find_dp_paths(dependencies, temp_dir):
"""
Locates the '/data' folder for dependencies.
    :param dependencies: list of dependencies from dependencies.json
    :param temp_dir: temporary working directory holding the fetched dependencies
    """
paths = []
for dependency in dependencies:
path = f'{temp_dir}/{dependency["name"]}'
dp_path = path
if 'path' in dependency and len(dependency['path']) > 0:
dp_path = path + '/' + dependency['path']
if not os.path.isdir(dp_path):
dirs = os.listdir(path)
            dp_path = f'{path}/{dirs[0]}/{dependency.get("path", "")}'
if not os.path.isdir(dp_path):
            raise RuntimeError(f"Told to search for datapack at {dp_path}, but that isn't a directory!")
queue = [dp_path]
found_directory = False
        while len(queue) > 0:
            current_path = queue.pop(0)
if current_path.endswith('/data'):
dp_path = current_path
found_directory = True
break
else:
for dir in os.listdir(current_path):
new_path = f'{current_path}/{dir}'
if os.path.isdir(new_path):
queue.append(new_path)
if not found_directory:
raise RuntimeError(f"failed to locate '/data' directory at {dp_path}")
logger.info(f'located data directory at {dp_path}')
paths.append(dp_path)
return paths
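# For example (hypothetical layout): a GitHub archive fetched from
# /archive/main.zip extracts to <temp>/some_lib/some_lib-main/..., so the
# breadth-first search above resolves the data folder to
# <temp>/some_lib/some_lib-main/data.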
def clean_dependencies(dp_paths, install_path, namespaces):
"""
    Removes previously installed dependencies from the install location.
    :param dp_paths: list of dependency data paths whose top-level dirs determine what to clean
    :param install_path: path to clean
    :param namespaces: list of protected namespaces to leave untouched
"""
cleaned_dirs = []
for dp_path in dp_paths:
top_dirs = os.listdir(dp_path)
for dir in top_dirs:
clean_path = install_path + '/' + dir
if dir not in namespaces and dir not in cleaned_dirs and os.path.isdir(clean_path):
logger.info(f'cleaning {clean_path}')
shutil.rmtree(clean_path)
cleaned_dirs.append(dir)
def update_version(dp_path, old_ver, new_ver, namespaces):
"""
Removes existing dependencies at install location
:param dp_paths: list of paths to copy from
:param install_path: path to copy to
:namespaces: list of protected namespaces
"""
if len(namespaces) == 0:
queue = [dp_path]
else:
queue = []
for namespace in namespaces:
queue.append(f'{dp_path}/{namespace}')
    while len(queue) > 0:
        current_path = queue.pop(0)
for f in os.listdir(current_path):
new_path = f'{current_path}/{f}'
if old_ver in f:
f = f.replace(old_ver, new_ver, 1)
os.rename(new_path, f'{current_path}/{f}')
logger.info(f'Renamed {new_path} to {current_path}/{f}')
new_path = f'{current_path}/{f}'
if os.path.isdir(new_path):
queue.append(new_path)
elif os.path.isfile(new_path):
replace_file_contents(new_path, old_ver, new_ver)
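# For example (hypothetical names): with old_ver '1.0' and new_ver '1.1',
# data/my_lib/functions/api_1.0/get.mcfunction is renamed to
# data/my_lib/functions/api_1.1/get.mcfunction, and a call like
# 'function my_lib:api_1.0/get' inside any file is rewritten to use '1.1'.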
def install_dependencies(dp_paths, install_path, namespaces):
"""
    Installs dependencies by copying files and merging where practical.
    :param dp_paths: list of paths to copy from
    :param install_path: path to copy to
    :param namespaces: list of protected namespaces
"""
cleaned_files = []
for dp_path in dp_paths:
top_dirs = os.listdir(dp_path)
for dir in top_dirs:
            files = glob.glob('**/*.*', recursive=True, root_dir=f'{dp_path}/{dir}')  # root_dir requires Python 3.10+
for f in files:
src_path = f'{dp_path}/{dir}/{f}'
dst_path = f'{install_path}/{dir}/{f}'
if os.path.isfile(dst_path):
                    if dir in namespaces and dst_path not in cleaned_files and not has_tag('-o', '--output'):
                        logger.info(f'Cleaning file {dst_path}')
                        os.remove(dst_path)
                        copy_file(src_path, dst_path)
                        cleaned_files.append(dst_path)
elif src_path.endswith('.json'):
merge_tag_files(src_path, dst_path)
else:
logger.warning(f'File {dst_path} already exists. Skipping.')
elif os.path.isfile(src_path):
copy_file(src_path, dst_path)
def append_tag_files(dir_path, tags):
"""
Appends provided values to tags.
:param dir_path: path of '/data' folder containing tags
:param tags: list of tag:value pairs from dependencies.json
"""
if tags is None:
return
for tag in tags:
split = tag['tag'].split(':')
path = f'{dir_path}/{split[0]}/tags/functions/{split[1]}.json'
append_tag_file(path, tag['value'])
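# For example (hypothetical values): the pair
# {"tag": "minecraft:load", "value": "my_datapack:load"} appends
# "my_datapack:load" to <data>/minecraft/tags/functions/load.json.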
def merge_tag_files(src, dst):
""" Appends a single line value to a tag
:param path: file to append (must be a tag file, like function tags)
:param value: value to append (example, 'my_datapack:load')
"""
logger.info(f'merging {src} into {dst}')
assert os.path.isfile(src) and os.path.isfile(dst) and src != dst
merge = ''
with open(src, 'r') as f_src:
with open(dst, 'r') as f_dst:
src_contents = json.loads(f_src.read())
dst_contents = json.loads(f_dst.read())
if 'values' not in dst_contents or 'values' not in src_contents:
return
for entry in src_contents['values']:
if entry not in dst_contents['values']:
dst_contents['values'].append(entry)
merge = json.dumps(dst_contents, indent=2)
with open(dst, 'w') as f_dst:
f_dst.write(merge)
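# For example (hypothetical contents): merging a src tag file of
# {"values": ["some_lib:tick"]} into a dst tag file of
# {"values": ["my_datapack:tick"]} rewrites dst as
# {"values": ["my_datapack:tick", "some_lib:tick"]}.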
def append_tag_file(path, value):
""" Appends a single line value to a tag
:param path: file to append (must be a tag file, like function tags)
:param value: value to append (example, 'my_datapack:load')
"""
logger.info(f'merging {value} into {path}')
assert os.path.isfile(path)
merge = ''
with open(path, 'r') as f:
contents = json.loads(f.read())
        if value not in contents.setdefault('values', []):
contents['values'].append(value)
merge = json.dumps(contents, indent=2)
with open(path, 'w') as f_dst:
f_dst.write(merge)
def copy_file(src, dst):
try:
os.makedirs(os.path.dirname(dst), exist_ok=True)
shutil.copyfile(src, dst)
except PermissionError:
logger.warning(f'Failed to copy file {src} to {dst} - PermissionError')
def has_tag(short, long):
return short in sys.argv or long in sys.argv
def get_tag_value(short, long):
index = -1
if short in sys.argv:
index = sys.argv.index(short) + 1
elif long in sys.argv:
index = sys.argv.index(long) + 1
if index == -1 or index >= len(sys.argv) or sys.argv[index].startswith('-'):
        raise RuntimeError(f"Modifier {short} / {long} requires an additional value, e.g. 'python build-datapack.py {short} <data>'")
else:
return sys.argv[index]
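# For example (hypothetical invocation): given
# 'python build-datapack.py my_pack -u 1.1', has_tag('-u', '--upgrade')
# returns True and get_tag_value('-u', '--upgrade') returns '1.1'.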
def replace_file_contents(path, old_str, new_str):
    """Rewrites a text file in place, replacing every occurrence of old_str with new_str."""
with open(path, 'r') as f:
contents = f.read()
contents = contents.replace(old_str, new_str)
with open(path, 'w') as f:
f.write(contents)
def main(path):
# open dependencies.json
with open(f'{path}/{FILE_NAME}', 'r') as f:
        try:
            dependencies_json = json.loads(f.read())
        except json.JSONDecodeError as e:
            raise RuntimeError('Invalid JSON file. Try using a JSON validator.') from e
logger.debug(f'dependencies file: {dependencies_json}')
# verify /data directory exists
dir_path = path + '/data'
if not os.path.isdir(dir_path):
raise RuntimeError("Datapack does not have a '/data' directory")
# get list of dependencies
dependencies = dependencies_json.get('dependencies', None)
if dependencies is None:
raise ValueError('Missing dependencies field in dependencies.json')
# get list of protected namespaces
namespaces = dependencies_json.get('namespaces', None)
if namespaces is None:
namespaces = []
elif isinstance(namespaces, str):
namespaces = [namespaces]
# get version
version = dependencies_json.get('version', None)
# add ignored files
ignored_files = dependencies_json.get('ignored_files', None)
    if isinstance(ignored_files, list):
IGNORE_LIST.extend(ignored_files)
with TemporaryDirectory() as temp_dir:
# Step 0: copy datapack to temp if output flag is enabled
path_truncated = path
if '/' in path_truncated:
path_truncated = path_truncated[path_truncated.rfind('/')+1:]
        if has_tag('-o', '--output'):
print('Copying datapack files...')
if has_tag('-c', '--clean'):
                raise RuntimeError('-o --output and -c --clean are mutually exclusive; pick one and run again.')
shutil.copytree(path, f'{temp_dir}/{path_truncated}', ignore=shutil.ignore_patterns(*IGNORE_LIST))
dir_path = f'{temp_dir}/{path_truncated}/data'
# Step 1: copy/download/extract dependencies
print('Grabbing dependencies...')
get_dependencies(dependencies, temp_dir)
# Step 2: find dependency datapack paths
dependency_paths = find_dp_paths(dependencies, temp_dir)
# Step 3: clean existing dependencies
if not has_tag('-o', '--output'):
print('Cleaning dependencies...')
clean_dependencies(dependency_paths, dir_path, namespaces)
# Step 4: upgrade version (if flag is used)
if has_tag('-u', '--upgrade'):
if version is None:
raise RuntimeError('Specify current version in dependencies.json to upgrade.')
new_version = get_tag_value('-u', '--upgrade')
if version == new_version:
logger.warning('Current and target version match, skipping upgrade step.')
else:
print(f'Upgrading from {version} to {new_version}...')
update_version(dir_path, version, new_version, namespaces)
dependencies_json['version'] = new_version
                for tag in dependencies_json.get('append_function_tags', []):
tag['tag'] = tag['tag'].replace(version, new_version)
tag['value'] = tag['value'].replace(version, new_version)
with open(f'{path}/{FILE_NAME}', 'w') as f:
f.write(json.dumps(dependencies_json, indent=2))
if not has_tag('-c', '--clean'):
# Step 5: install dependencies
print('Installing dependencies...')
install_dependencies(dependency_paths, dir_path, namespaces)
# Step 6: Append to function tags
if not has_tag('-t', '--no-tags'):
print('Appending function tags...')
append_tag_files(dir_path, dependencies_json.get('append_function_tags', None))
        # Step 7: Copy output if -o --output is used
        if has_tag('-o', '--output'):
output_dir = get_tag_value('-o', '--output')
if not os.path.isdir(output_dir):
raise RuntimeError("Directory specified for -o --ouput does not exist.")
print('Zipping datapack...')
os.chdir(output_dir)
shutil.make_archive(path_truncated, 'zip', f'{temp_dir}/{path_truncated}')
print('Finished. Build successful.')
if __name__ == "__main__":
if has_tag('-h', '--help') or len(sys.argv) == 1:
print(HELP_TEXT)
else:
path = sys.argv[1]
if path.endswith(FILE_NAME):
            path = os.path.dirname(path) or '.'
try:
main(path)
except Exception as e:
logger.error(e)
            raise