···
[2]: https://github.com/moby/moby/blob/4fb59c20a4fb54f944fe170d0ff1d00eb4a24d6f/image/spec/v1.2.md#image-json-field-descriptions
···
invalid_paths = [i for i in paths if not i.startswith(store_dir)]
- assert len(invalid_paths) == 0, \
-     f"Expecting absolute paths from {store_dir}, but got: {invalid_paths}"
+ assert (
+     len(invalid_paths) == 0
+ ), f"Expecting absolute paths from {store_dir}, but got: {invalid_paths}"
# First, calculate the tarball checksum and the size.
extract_checksum = ExtractChecksum()
- archive_paths_to(
-     extract_checksum,
-     paths,
-     mtime, uid, gid, uname, gname
- )
+ archive_paths_to(extract_checksum, paths, mtime, uid, gid, uname, gname)
(checksum, size) = extract_checksum.extract()
path = f"{checksum}/layer.tar"
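
Both passes over the layer rely on the same trick: archive_paths_to() writes the tar stream into a file-like object, and the first pass hands it ExtractChecksum, which only records the digest and size of what flows through it. As a rough sketch of that idea (a hypothetical ChecksumSink, not the ExtractChecksum implementation from this script), such a sink only needs a write() method:

    import hashlib

    class ChecksumSink:
        """File-like sink that discards data while tracking its SHA-256 and size."""

        def __init__(self):
            self._hash = hashlib.sha256()
            self._size = 0

        def write(self, data):
            # Hash and count the bytes instead of storing them anywhere.
            self._hash.update(data)
            self._size += len(data)
            return len(data)

        def extract(self):
            # Mirror the (checksum, size) pair read off extract_checksum above.
            return (self._hash.hexdigest(), self._size)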
···
# Then actually stream the contents to the outer tarball.
read_fd, write_fd = os.pipe()
with open(read_fd, "rb") as read, open(write_fd, "wb") as write:
- archive_paths_to(
-     write,
-     paths,
-     mtime, uid, gid, uname, gname
- )
+ archive_paths_to(write, paths, mtime, uid, gid, uname, gname)
# Closing the write end of the fifo also closes the read end,
···
target_tar.addfile(tarinfo, f)
-     size=None,
-     checksum=checksum,
-     path=path,
-     paths=[customisation_layer]
+     size=None, checksum=checksum, path=path, paths=[customisation_layer]
···
- with open(sys.argv[1], "r") as f:
+ arg_parser = argparse.ArgumentParser(
+     description="""
+ This script generates a Docker image from a set of store paths. Uses
+ Docker Image Specification v1.2 as reference [1].
+
+ [1]: https://github.com/moby/moby/blob/master/image/spec/v1.2.md
+     """
+ )
+ arg_parser.add_argument(
+     "conf",
+     type=str,
+     help="""
+ JSON file with the following properties and writes the
+ image as an uncompressed tarball to stdout:
+
+ * "architecture", "config", "os", "created", "repo_tag" correspond to
+   the fields with the same name on the image spec [2].
+ * "created" can be "now".
+ * "created" is also used as mtime for files added to the image.
+ * "uid", "gid", "uname", "gname" is the file ownership, for example,
+   0, 0, "root", "root".
+ * "store_layers" is a list of layers in ascending order, where each
+   layer is the list of store paths to include in that layer.
+     """
+ )
+ arg_parser.add_argument(
+     "--repo_tag", "-t", type=str,
+     help="Override the RepoTags from the configuration"
+ )
+
+ args = arg_parser.parse_args()
+ with open(args.conf, "r") as f:
conf = json.load(f)

created = (
-   datetime.now(tz=timezone.utc)
-   if conf["created"] == "now"
-   else datetime.fromisoformat(conf["created"])
+     datetime.now(tz=timezone.utc)
+     if conf["created"] == "now"
+     else datetime.fromisoformat(conf["created"])
)
mtime = int(created.timestamp())
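
To make the expected input concrete, here is a hypothetical configuration covering only the properties listed in the conf help text above (every value, tag, and store path below is invented for illustration; the script also reads further keys, such as "customisation_layer", that appear elsewhere in this diff):

    import json

    # Hypothetical example; all values are placeholders.
    example_conf = {
        "architecture": "amd64",
        "os": "linux",
        "created": "now",  # or an ISO-8601 timestamp
        "repo_tag": "hello:latest",
        "uid": 0,
        "gid": 0,
        "uname": "root",
        "gname": "root",
        "config": {"Cmd": ["/nix/store/example-hello/bin/hello"]},
        "store_layers": [
            ["/nix/store/example-glibc"],
            ["/nix/store/example-hello"],
        ],
    }

    with open("conf.json", "w") as f:
        json.dump(example_conf, f, indent=2)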
···
for num, store_layer in enumerate(conf["store_layers"], start=start):
- print("Creating layer", num, "from paths:", store_layer,
-       file=sys.stderr)
- info = add_layer_dir(tar, store_layer, store_dir,
-                      mtime, uid, gid, uname, gname)
+ print("Creating layer", num, "from paths:", store_layer, file=sys.stderr)
+ info = add_layer_dir(
+     tar, store_layer, store_dir, mtime, uid, gid, uname, gname
+ )
- print("Creating layer", len(layers) + 1, "with customisation...",
-       file=sys.stderr)
+ print(
+     "Creating layer",
+     len(layers) + 1,
+     "with customisation...",
+     file=sys.stderr,
+ )
layers.append(
-     add_customisation_layer(
-         tar,
-         conf["customisation_layer"],
-         mtime=mtime
-     )
+     add_customisation_layer(
+         tar, conf["customisation_layer"], mtime=mtime
+     )
)
print("Adding manifests...", file=sys.stderr)
···
- "created": datetime.isoformat(created),
- "comment": f"store paths: {layer.paths}"
+ "created": datetime.isoformat(created),
+ "comment": f"store paths: {layer.paths}",
···
"Config": image_json_path,
- "RepoTags": [conf["repo_tag"]],
+ "RepoTags": [args.repo_tag or conf["repo_tag"]],
"Layers": [layer.path for layer in layers],