Generates 10 random images and runs some tests on them. The tests run are:
* Dump (parse and re-write it) the image and ensure we get identical results
* Run fsck.erofs on the image
* Mount the image using fuse and ensure the dumpdir output is the same (sans non-user xattrs and whiteouts)
* Run mkcomposefs on the fuse mount and ensure the result is similar. (It produces the same fuse mount dump, but due to non-user xattrs and whiteouts it does not produce an identical composefs image.)

Signed-off-by: Alexander Larsson <[email protected]>
1 parent a7a3dc9 · commit 2c365e2
Showing 9 changed files with 332 additions and 6 deletions.
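Of the checks in the commit message, the dump round trip is the core invariant: an image built from the random tree and then re-written through composefs-dump must come back byte-identical. A minimal sketch of that check, using the tool names that appear in the test script below ($root and the output file names are illustrative):

    # Build an image from a generated tree, fsck it, re-write it, and compare.
    mkcomposefs --digest-store=objects "$root" root.cfs
    fsck.erofs root.cfs
    composefs-dump root.cfs rewritten.cfs
    cmp root.cfs rewritten.cfs   # the round trip must be bit-identical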
@@ -3,6 +3,7 @@
 /Makefile
 libcomposefs/Makefile
 tools/Makefile
+tests/Makefile
 Makefile.in
 aclocal.m4
 autom4te.cache
@@ -1,4 +1,4 @@
 #!/bin/bash
 set -xeuo pipefail
 export DEBIAN_FRONTEND=noninteractive
-apt-get install -y automake libtool autoconf autotools-dev git make gcc libyajl-dev libssl-dev libfsverity-dev pkg-config libfuse3-dev
+apt-get install -y automake libtool autoconf autotools-dev git make gcc libyajl-dev libssl-dev libfsverity-dev pkg-config libfuse3-dev python3 libcap2-bin
@@ -0,0 +1,195 @@
#!/usr/bin/python3

import argparse
import hashlib
import os
import random
import shlex
import shutil
import stat
import string
import sys

adjectives = ["adorable", "adventurous", "aggressive", "agreeable", "alert", "alive", "amused", "angry", "annoyed", "annoying", "anxious", "arrogant", "ashamed", "attractive", "average", "awful", "bad", "beautiful", "better", "bewildered", "black", "bloody", "blue", "blue-eyed", "blushing", "bored", "brainy", "brave", "breakable", "bright", "busy", "calm", "careful", "cautious", "charming", "cheerful", "clean", "clear", "clever", "cloudy", "clumsy", "colorful", "combative", "comfortable", "concerned", "condemned", "confused", "cooperative", "courageous", "crazy", "creepy", "crowded", "cruel", "curious", "cute", "dangerous", "dark", "dead", "defeated", "defiant", "delightful", "depressed", "determined", "different", "difficult", "disgusted", "distinct", "disturbed", "dizzy", "doubtful", "drab", "dull", "eager", "easy", "elated", "elegant", "embarrassed", "enchanting", "encouraging", "energetic", "enthusiastic", "envious", "evil", "excited", "expensive", "exuberant", "fair", "faithful", "famous", "fancy", "fantastic", "fierce", "filthy", "fine", "foolish", "fragile", "frail", "frantic", "friendly", "frightened", "funny", "gentle", "gifted", "glamorous", "gleaming", "glorious", "good", "gorgeous", "graceful", "grieving", "grotesque", "grumpy", "handsome", "happy", "healthy", "helpful", "helpless", "hilarious", "homeless", "homely", "horrible", "hungry", "hurt", "ill", "important", "impossible", "inexpensive", "innocent", "inquisitive", "itchy", "jealous", "jittery", "jolly", "joyous", "kind", "lazy", "light", "lively", "lonely", "long", "lovely", "lucky", "magnificent", "misty", "modern", "motionless", "muddy", "mushy", "mysterious", "nasty", "naughty", "nervous", "nice", "nutty", "obedient", "obnoxious", "odd", "old-fashioned", "open", "outrageous", "outstanding", "panicky", "perfect", "plain", "pleasant", "poised", "poor", "powerful", "precious", "prickly", "proud", "putrid", "puzzled", "quaint", "real", "relieved", "repulsive", "rich", "scary", "selfish", "shiny", "shy", "silly", "sleepy", "smiling", "smoggy", "sore", "sparkling", "splendid", "spotless", "stormy", "strange", "stupid", "successful", "super", "talented", "tame", "tasty", "tender", "tense", "terrible", "thankful", "thoughtful", "thoughtless", "tired", "tough", "troubled", "ugliest", "ugly", "uninterested", "unsightly", "unusual", "upset", "uptight", "vast", "victorious", "vivacious", "wandering", "weary", "wicked", "wide-eyed", "wild", "witty", "worried", "worrisome", "wrong", "zany", "zealous"]

nouns = ["apple", "air", "conditioner", "airport", "ambulance", "aircraft", "apartment", "arrow", "antlers", "apro", "alligator", "architect", "ankle", "armchair", "aunt", "ball", "bermudas", "beans", "balloon", "bear", "blouse", "bed", "bow", "bread", "black", "board", "bones", "bill", "bitterness", "boxers", "belt", "brain", "buffalo", "bird", "baby", "book", "back", "butter", "bulb", "buckles", "bat", "bank", "bag", "bra", "boots", "blazer", "bikini", "bookcase", "bookstore", "bus", "stop", "brass", "brother", "boy", "blender", "bucket", "bakery", "bow", "bridge", "boat", "car", "cow", "cap", "cooker", "cheeks", "cheese", "credenza", "carpet", "crow", "crest", "chest", "chair", "candy", "cabinet", "cat", "coffee", "children", "cookware", "chaise", "longue", "chicken", "casino", "cabin", "castle", "church", "cafe", "cinema", "choker", "cravat", "cane", "costume", "cardigan", "chocolate", "crib", "couch", "cello", "cashier", "composer", "cave", "country", "computer", "canoe", "clock", "charlie", "dog", "deer", "donkey", "desk", "desktop", "dress", "dolphin", "doctor", "dentist", "drum", "dresser", "designer", "detective", "daughter", "egg", "elephant", "earrings", "ears", "eyes", "estate", "finger", "fox", "frock", "frog", "fan", "freezer", "fish", "film", "foot", "flag", "factory", "father", "farm", "forest", "flower", "fruit", "fork", "grapes", "goat", "gown", "garlic", "ginger", "giraffe", "gauva", "grains", "gas", "station", "garage", "gloves", "glasses", "gift", "galaxy", "guitar", "grandmother", "grandfather", "governor", "girl", "guest", "hamburger", "hand", "head", "hair", "heart", "house", "horse", "hen", "horn", "hat", "hammer", "hostel", "hospital", "hotel", "heels", "herbs", "host", "jacket", "jersey", "jewelry", "jaw", "jumper", "judge", "juicer", "keyboard", "kid", "kangaroo", "koala", "knife", "lemon", "lion", "leggings", "leg", "laptop", "library", "lamb", "london", "lips", "lung", "lighter", "luggage", "lamp", "lawyer", "mouse", "monkey", "mouth", "mango", "mobile", "milk", "music", "mirror", "musician", "mother", "man", "model", "mall", "museum", "market", "moonlight", "medicine", "microscope", "newspaper", "nose", "notebook", "neck", "noodles", "nurse", "necklace", "noise", "ocean", "ostrich", "oil", "orange", "onion", "oven", "owl", "paper", "panda", "pants", "palm", "pasta", "pumpkin", "pharmacist", "potato", "parfume", "panther", "pad", "pencil", "pipe", "police", "pen", "pharmacy", "petrol", "station", "police", "station", "parrot", "plane", "pigeon", "phone", "peacock", "pencil", "pig", "pouch", "pagoda", "pyramid", "purse", "pancake", "popcorn", "piano", "physician", "photographer", "professor", "painter", "park", "plant", "parfume", "radio", "razor", "ribs", "rainbow", "ring", "rabbit", "rice", "refrigerator", "remote", "restaurant", "road", "surgeon", "scale", "shampoo", "sink", "salt", "shark", "sandals", "shoulder", "spoon", "soap", "sand", "sheep", "sari", "stomach", "stairs", "soup", "shoes", "scissors", "sparrow", "shirt", "suitcase", "stove", "stairs", "snowman", "shower", "swan", "suit", "sweater", "smoke", "skirt", "sofa", "socks", "stadium", "skyscraper", "school", "sunglasses", "sandals", "slippers", "shorts", "sandwich", "strawberry", "spaghetti", "shrimp", "saxophone", "sister", "son", "singer", "senator", "street", "supermarket", "swimming", "pool", "star", "sky", "sun", "spoon", "ship", "smile", "table", "turkey", "tie", "toes", "truck", "train", "taxi", "tiger", "trousers", "tongue", "television", "teacher", "turtle", "tablet", "train", "station", "toothpaste", "tail", "theater", "trench", "coat", "tea", "tomato", "teen", "tunnel", "temple", "town", "toothbrush", "tree", "toy", "tissue", "telephone", "underwear", "uncle", "umbrella", "vest", "voice", "veterinarian", "villa", "violin", "village", "vehicle", "vase", "wallet", "wolf", "waist", "wrist", "water", "melon", "whale", "water", "wings", "whisker", "watch", "woman", "washing", "machine", "wheelchair", "waiter", "wound", "xylophone", "zebra", "zoo"]

def with_chance(chance):
    return random.random() <= chance

# A Chance instance draws a single random value and checks it against
# consecutive, disjoint probability intervals, so successive with_chance()
# calls on the same instance are mutually exclusive.
class Chance():
    def __init__(self):
        self.value = random.random()
        self.start = 0

    def with_chance(self, chance):
        if self.start > 1:
            print("Too many choices")
        start = self.start
        end = self.start + chance
        self.start = end
        return self.value >= start and self.value < end

    # Choose one of weighted options
    def choice(self, options):
        for value, chance in options:
            if self.with_chance(chance):
                return value
        # Default to first
        value, chance = options[0]
        return value

def gen_dir_mode():
    # For creation to work we want all dirs u+rwx
    return random.choice([0o777, 0o755, 0o750, 0o700])

def gen_file_mode():
    return random.choice([0o644, 0o666, 0o755, 0o777])

def gen_filename():
    if not args.unreadable:
        name = bytes(random.choice(adjectives) + "_" + random.choice(nouns) + str(random.randint(1,999)), "utf-8")
        if len(name) > 255:
            return gen_filename()
        return name

    name_len = random.randrange(1, 255)
    name = [0] * name_len
    for i in range(name_len):
        c = random.randrange(1, 255)
        while c == ord('/'):
            c = random.randrange(1, 255)
        name[i] = c
    name = bytes(name)
    if name == b'.' or name == b'..':
        return gen_filename()
    return name

def gen_filenames():
    c = Chance()
    # 5% of dirs are huge
    if c.with_chance(0.05):
        num_files = random.randrange(0, 4096)
    else:
        num_files = random.randrange(0, 25)

    files = []
    for i in range(num_files):
        files.append(gen_filename())

    return list(sorted(set(files)))

def gen_xattrname():
    return random.choice(nouns) + str(random.randint(1,9))

def gen_xattrdata():
    return bytes(random.choice(adjectives) + str(random.randint(1,9)), "utf-8")


def gen_hierarchy(root):
    num_dirs = random.randrange(30, 50)
    dirs = []
    for i in range(num_dirs):
        parent = random.choice([root] * 3 + dirs)
        p = os.path.join(parent, gen_filename())
        dirs.append(p)
    # Sort and drop any (unlikely) duplicates
    return list(sorted(set(dirs)))

def set_user_xattr(path):
    n_xattrs = random.randrange(0, 3)
    for i in range(n_xattrs):
        name = "user." + gen_xattrname()
        value = gen_xattrdata()
        os.setxattr(path, name, value, follow_symlinks=False)

old_files = []
def make_regular_file(path):
    with os.fdopen(os.open(path, os.O_WRONLY|os.O_CREAT, gen_file_mode()), 'wb') as fd:
        c = Chance()
        # 5% chance of reusing old file data
        if len(old_files) > 0 and c.with_chance(0.05):
            reused = random.choice(old_files)
            with os.fdopen(os.open(reused, os.O_RDONLY), 'rb') as src:
                shutil.copyfileobj(src, fd)
            return

        # 5% of files are large
        if c.with_chance(0.05):
            size = random.randrange(0, 4*1024*1024)
        else: # Rest are small
            size = random.randrange(0, 256)

        data = random.randbytes(size)
        fd.write(data)
        # Save path for reuse
        old_files.append(path)

    set_user_xattr(path)

def make_symlink(path):
    target = gen_filename()
    os.symlink(target, path)

def make_node(path):
    if not args.privileged:
        return
    target = gen_filename()
    os.mknod(path, gen_file_mode() | random.choice([stat.S_IFCHR,stat.S_IFBLK]), os.makedev(0,0))

def make_whiteout(path):
    if args.nowhiteout:
        return
    target = gen_filename()
    os.mknod(path, gen_file_mode() | stat.S_IFCHR, device=os.makedev(0,0))

def make_fifo(path):
    target = gen_filename()
    os.mknod(path, gen_file_mode() | stat.S_IFIFO)

def make_file(path):
    c = Chance()
    f = c.choice([
        (make_regular_file, 0.7),
        (make_symlink, 0.15),
        (make_fifo, 0.05),
        (make_node, 0.05),
        (make_whiteout, 0.05)
    ])
    f(path)

def make_dir(path, dirs):
    os.mkdir(path, mode=gen_dir_mode())
    set_user_xattr(path)
    files = gen_filenames()
    for f in files:
        child_path = os.path.join(path, f)
        if child_path in dirs:
            continue

        func = random.choice([make_file])
        func(child_path)

argParser = argparse.ArgumentParser()
argParser.add_argument("--seed")
argParser.add_argument("--unreadable", action='store_true')
argParser.add_argument("--privileged", action='store_true')
argParser.add_argument("--nowhiteout", action='store_true')
argParser.add_argument('path')

args = argParser.parse_args()

if args.seed:
    seed = args.seed
else:
    seed = os.urandom(16).hex()
random.seed(seed)
print(f"Using seed '{seed}'")

# Generate tree structure
root = bytes(args.path,"utf-8")
dirs = gen_hierarchy(root)

make_dir(root, dirs)
for d in dirs:
    make_dir(d, dirs)
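For reference, the generator's command line is exactly what the argparse block above defines; a typical unprivileged invocation (the ./gendir name is how the test script below calls it, and the seed value and target path here are illustrative) might be:

    # Generate a random tree under /tmp/root with a fixed seed; --nowhiteout skips
    # the char(0,0) whiteout nodes, and without --privileged no device nodes are made.
    ./gendir --seed=deadbeef --nowhiteout /tmp/root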
@@ -0,0 +1,38 @@
#!/usr/bin/bash

check_whiteout () {
    tmpfile=$(mktemp /tmp/lcfs-whiteout.XXXXXX)
    rm -f $tmpfile
    if mknod $tmpfile c 0 0 &> /dev/null; then
        echo y
    else
        echo n
    fi
    rm -f $tmpfile
}

check_fuse () {
    fusermount --version >/dev/null 2>&1 || return 1

    capsh --print | grep -q 'Bounding set.*[^a-z]cap_sys_admin' || \
        return 1

    [ -w /dev/fuse ] || return 1
    [ -e /etc/mtab ] || return 1

    return 0
}

check_erofs_fsck () {
    if which fsck.erofs &>/dev/null; then
        echo y
    else
        echo n
    fi
}

[[ -v can_whiteout ]] || can_whiteout=$(check_whiteout)
[[ -v has_fuse ]] || has_fuse=$(if check_fuse; then echo y; else echo n; fi)
[[ -v has_fsck ]] || has_fsck=$(check_erofs_fsck)

echo Test options: can_whiteout=$can_whiteout has_fuse=$has_fuse has_fsck=$has_fsck
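Since each option is only probed when its variable is unset, the detection can be overridden from the environment, which is handy for forcing the degraded code paths on a fully capable machine. A small sketch (the values are illustrative; test-lib.sh is the name this library is sourced under in the test script below):

    # Pre-seed the detection variables; test-lib.sh skips any check whose
    # variable is already set ([[ -v ... ]]), so these values win.
    can_whiteout=n
    has_fuse=n
    . test-lib.sh   # prints: Test options: can_whiteout=n has_fuse=n has_fsck=<detected>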
@@ -0,0 +1,87 @@
#!/usr/bin/bash

BINDIR="$1"

set -e

workdir=$(mktemp -d /var/tmp/lcfs-test.XXXXXX)
exit_cleanup() {
    umount "$workdir/mnt" &> /dev/null || true
    rm -rf -- "$workdir"
}

trap exit_cleanup EXIT

. test-lib.sh

GENDIRARGS=""
if [ ${can_whiteout} == "n" ]; then
    GENDIRARGS="$GENDIRARGS --nowhiteout"
fi

if [[ -v seed ]]; then
    GENDIRARGS="$GENDIRARGS --seed=$seed"
fi

test_random() {
    echo Generating root dir
    ./gendir $GENDIRARGS $workdir/root
    ./dumpdir --userxattr --whiteout $workdir/root > $workdir/root.dump
    echo Generating composefs image
    ${VALGRIND_PREFIX} ${BINDIR}/mkcomposefs --digest-store=$workdir/objects $workdir/root $workdir/root.cfs
    if [ $has_fsck == y ]; then
        fsck.erofs $workdir/root.cfs
    fi

    # Loading and dumping should produce identical results
    echo Dumping composefs image
    ${VALGRIND_PREFIX} ${BINDIR}/composefs-dump $workdir/root.cfs $workdir/dump.cfs
    if ! cmp $workdir/root.cfs $workdir/dump.cfs; then
        echo Dump is not reproducible
        diff -u <(${BINDIR}/composefs-info dump $workdir/root.cfs) <(${BINDIR}/composefs-info dump $workdir/dump.cfs)
        exit 1
    fi

    if [ $has_fuse == 'n' ]; then
        return;
    fi

    mkdir -p $workdir/mnt
    echo Mounting composefs image using fuse
    ${BINDIR}/composefs-fuse -o source=$workdir/root.cfs,basedir=$workdir/objects $workdir/mnt
    ./dumpdir --userxattr --whiteout $workdir/mnt > $workdir/fuse.dump

    ${VALGRIND_PREFIX} ${BINDIR}/mkcomposefs --digest-store=$workdir/objects $workdir/mnt $workdir/fuse.cfs
    if [ $has_fsck == y ]; then
        fsck.erofs $workdir/fuse.cfs
    fi

    umount $workdir/mnt

    if ! cmp $workdir/root.dump $workdir/fuse.dump; then
        echo Real dir and fuse dump differ
        diff -u $workdir/root.dump $workdir/fuse.dump
        exit 1
    fi

    ${BINDIR}/composefs-fuse -o source=$workdir/fuse.cfs,basedir=$workdir/objects $workdir/mnt
    ./dumpdir --userxattr --whiteout $workdir/mnt > $workdir/fuse2.dump
    umount $workdir/mnt

    # fuse.cfs and fuse2.cfs files differ due to whiteout conversions and non-user xattrs.
    # However, the listed output should be the same:
    if ! cmp $workdir/fuse.dump $workdir/fuse2.dump; then
        echo Fuse and fuse2 dump differ
        diff -u $workdir/fuse.dump $workdir/fuse2.dump
        exit 1
    fi
}

if [[ -v seed ]]; then
    test_random
else
    for i in $(seq 10) ; do
        test_random
        rm -rf $workdir/*
    done
fi
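The seed handling gives a simple way to replay a failure: gendir prints the seed it used ("Using seed '...'"), and passing that value back in via the environment takes the single-iteration branch above with a deterministic tree. A hedged example, where the script name and tools directory are placeholders for however this test is invoked:

    # Re-run one iteration with a known seed; script name and BINDIR path are
    # placeholders, and VALGRIND_PREFIX may also be set in the same way.
    seed=0123456789abcdef0123456789abcdef ./test-random-images.sh /path/to/composefs/tools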