
Commit

Merge pull request #191 from alexlarsson/more-tests
More tests
alexlarsson authored Sep 19, 2023
2 parents cca8be4 + 2c365e2 commit 420cd38
Showing 16 changed files with 502 additions and 61 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/test.yaml
@@ -14,6 +14,8 @@ jobs:
uses: actions/checkout@v3
- name: Install dependencies
run: sudo ./hacking/installdeps.sh
- name: Install fsck.erofs
run: sudo apt install erofs-utils
- name: Configure
run: ./autogen.sh && ./configure --prefix=/usr --sysconfdir=/etc --libdir=/usr/lib/$(dpkg-architecture -qDEB_HOST_MULTIARCH) CFLAGS='-Wall -Werror'
- name: Build
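For context, the CI step above installs erofs-utils so the new tests can run fsck.erofs against the images they build. As a rough sketch of that check in isolation (the image name is a placeholder, not taken from the test scripts):

$ fsck.erofs example.cfs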
1 change: 1 addition & 0 deletions .gitignore
@@ -3,6 +3,7 @@
/Makefile
libcomposefs/Makefile
tools/Makefile
tests/Makefile
Makefile.in
aclocal.m4
autom4te.cache
2 changes: 1 addition & 1 deletion hacking/installdeps.sh
@@ -1,4 +1,4 @@
#!/bin/bash
set -xeuo pipefail
export DEBIAN_FRONTEND=noninteractive
apt-get install -y automake libtool autoconf autotools-dev git make gcc libyajl-dev libssl-dev libfsverity-dev pkg-config libfuse3-dev
apt-get install -y automake libtool autoconf autotools-dev git make gcc libyajl-dev libssl-dev libfsverity-dev pkg-config libfuse3-dev python3 libcap2-bin
2 changes: 1 addition & 1 deletion libcomposefs/lcfs-writer-erofs.c
@@ -881,7 +881,7 @@ static int write_erofs_inode_data(struct lcfs_ctx_s *ctx, struct lcfs_node_s *no
i.i_uid = lcfs_u32_to_file(node->inode.st_uid);
i.i_gid = lcfs_u32_to_file(node->inode.st_gid);
i.i_mtime = lcfs_u64_to_file(node->inode.st_mtim_sec);
i.i_mtime_nsec = lcfs_u64_to_file(node->inode.st_mtim_nsec);
i.i_mtime_nsec = lcfs_u32_to_file(node->inode.st_mtim_nsec);

if (type == S_IFDIR) {
if (node->erofs_n_blocks > 0) {
18 changes: 15 additions & 3 deletions libcomposefs/lcfs-writer.c
@@ -376,13 +376,15 @@ int lcfs_write_to(struct lcfs_node_s *root, struct lcfs_write_options_s *options
return 0;
}

static int read_xattrs(struct lcfs_node_s *ret, int dirfd, const char *fname)
static int read_xattrs(struct lcfs_node_s *ret, int dirfd, const char *fname,
int buildflags)
{
char path[PATH_MAX];
ssize_t list_size;
cleanup_free char *list = NULL;
ssize_t r = 0;
cleanup_fd int fd = -1;
bool user_xattr = (buildflags & LCFS_BUILD_USER_XATTRS) != 0;

fd = openat(dirfd, fname, O_PATH | O_NOFOLLOW | O_CLOEXEC, 0);
if (fd < 0)
@@ -409,6 +411,9 @@ static int read_xattrs(struct lcfs_node_s *ret, int dirfd, const char *fname)
ssize_t value_size;
cleanup_free char *value = NULL;

if (user_xattr && !str_has_prefix(it, "user."))
continue;

value_size = getxattr(path, it, NULL, 0);
if (value_size < 0) {
return value_size;
@@ -563,7 +568,14 @@ struct lcfs_node_s *lcfs_load_node_from_file(int dirfd, const char *fname,

if (buildflags & ~(LCFS_BUILD_SKIP_XATTRS | LCFS_BUILD_USE_EPOCH |
LCFS_BUILD_SKIP_DEVICES | LCFS_BUILD_COMPUTE_DIGEST |
LCFS_BUILD_NO_INLINE)) {
LCFS_BUILD_NO_INLINE | LCFS_BUILD_USER_XATTRS)) {
errno = EINVAL;
return NULL;
}

if ((buildflags & LCFS_BUILD_SKIP_XATTRS) &&
(buildflags & LCFS_BUILD_USER_XATTRS)) {
/* These conflict */
errno = EINVAL;
return NULL;
}
@@ -621,7 +633,7 @@ struct lcfs_node_s *lcfs_load_node_from_file(int dirfd, const char *fname,
}

if ((buildflags & LCFS_BUILD_SKIP_XATTRS) == 0) {
r = read_xattrs(ret, dirfd, fname);
r = read_xattrs(ret, dirfd, fname, buildflags);
if (r < 0)
return NULL;
}
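To illustrate what the new user-xattr filter keeps, here is a small sketch using the standard setfattr tool (the file name and attribute values are invented for the example):

$ touch demo.txt
$ setfattr -n user.note -v hello demo.txt
$ sudo setfattr -n trusted.overlay.origin -v abc demo.txt

With LCFS_BUILD_USER_XATTRS set, only user.note is read into the node; the trusted.* attribute is skipped.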
1 change: 1 addition & 0 deletions libcomposefs/lcfs-writer.h
@@ -35,6 +35,7 @@ enum {
LCFS_BUILD_SKIP_DEVICES = (1 << 2),
LCFS_BUILD_COMPUTE_DIGEST = (1 << 3),
LCFS_BUILD_NO_INLINE = (1 << 4),
LCFS_BUILD_USER_XATTRS = (1 << 5), /* Only read user.* xattrs */
};

enum lcfs_format_t {
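Assuming mkcomposefs exposes this new build flag as a --user-xattrs option (an assumption; the tool-side change is not shown in this diff), building an image that records only user.* xattrs might look like:

$ mkcomposefs --user-xattrs rootfs/ example.cfs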
9 changes: 6 additions & 3 deletions tests/Makefile.am
@@ -12,9 +12,12 @@ endif
EXTRA_DIST = test-checksums.sh $(patsubst %,assets/%,${TEST_ASSETS_SMALL}) $(patsubst %,assets/%.sha256_erofs,${TEST_ASSETS_SMALL})

check-checksums:
$(srcdir)/test-checksums.sh "${VALGRIND_PREFIX} $(builddir)/../tools/" "$(srcdir)/assets" "${TEST_ASSETS}"
VALGRIND_PREFIX="${VALGRIND_PREFIX}" $(srcdir)/test-checksums.sh "$(builddir)/../tools/" "$(srcdir)/assets" "${TEST_ASSETS}"

check-units:
$(srcdir)/test-units.sh "${VALGRIND_PREFIX} $(builddir)/../tools/"
VALGRIND_PREFIX="${VALGRIND_PREFIX}" $(srcdir)/test-units.sh "$(builddir)/../tools/"

check: check-units check-checksums
check-random-fuse:
VALGRIND_PREFIX="${VALGRIND_PREFIX}" $(srcdir)/test-random-fuse.sh "$(builddir)/../tools/"

check: check-units check-checksums check-random-fuse
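Since the recipes now export VALGRIND_PREFIX to the test scripts instead of splicing it into the tools path, a valgrind run of the whole suite can be requested from the make command line, roughly like this (the valgrind options are only an example):

$ make check VALGRIND_PREFIX="valgrind --quiet --error-exitcode=1"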
48 changes: 44 additions & 4 deletions tests/dumpdir
@@ -10,22 +10,58 @@ import argparse
def log_error(error):
print("Readdir error: " + error)

def should_convert_whiteout(stbuf):
return args.whiteout and stat.S_ISCHR(stbuf.st_mode) and stbuf.st_rdev == os.makedev(0,0)

def has_whiteout_child(path):
for f in os.listdir(path):
s = os.lstat(os.path.join(path, f))
if should_convert_whiteout(s):
return True
return False

def dumpfile(file, root):
rel = os.path.relpath(file, root)
s = os.lstat(file)

st_mode = s.st_mode

content = None
xattrs = {}
if should_convert_whiteout(s):
content = b''
st_mode = (st_mode & ~stat.S_IFMT(st_mode)) | stat.S_IFREG
xattrs["trusted.overlay.overlay.whiteout"] = b''
xattrs["user.overlay.whiteout"] = b''

if stat.S_ISDIR(st_mode) and has_whiteout_child(file):
xattrs["trusted.overlay.overlay.whiteouts"] = b''
xattrs["user.overlay.whiteouts"] = b''

nlink = s.st_nlink;
if args.no_nlink:
nlink = 1
print(f"{shlex.quote(rel)} {oct(s.st_mode)} {nlink} {s.st_uid}:{s.st_gid} {s.st_rdev} {s.st_mtime_ns}",end="")
if stat.S_ISREG(s.st_mode):
digest = hashlib.sha256(open(file,'rb').read()).hexdigest()
print(f"{shlex.quote(rel)} {oct(st_mode)} {nlink} {s.st_uid}:{s.st_gid} {s.st_rdev} {s.st_mtime_ns}",end="")
if stat.S_ISREG(st_mode):
if content == None:
content = open(file,'rb').read()
digest = hashlib.sha256(content).hexdigest()
print(f" {s.st_size} sha256:{digest}",end="")
elif stat.S_ISLNK(s.st_mode):
elif stat.S_ISLNK(st_mode):
link = os.readlink(file)
print(f" ->{shlex.quote(link)}",end="")

for attr in sorted(os.listxattr(file, follow_symlinks=False)):
v = os.getxattr(file, attr, follow_symlinks=False)
xattrs[attr] = v

for attr in sorted(xattrs.keys()):
v = xattrs[attr]
if args.userxattr and not attr.startswith("user."):
continue
if args.noescaped and attr.startswith("trusted.overlay.overlay."):
continue

print(f" {attr}={v}", end="")

print()
@@ -35,11 +35,15 @@ def dumpfile(file, root):
def dumpdir(root):
dumpfile(root, root)
for parent, dirs, files in os.walk(root, topdown=True, onerror=log_error):
dirs.sort()
for file in sorted(dirs + files):
dumpfile(os.path.join(parent, file), root)

argParser = argparse.ArgumentParser()
argParser.add_argument("--no-nlink", action='store_true')
argParser.add_argument("--userxattr", action='store_true')
argParser.add_argument("--whiteout", action='store_true')
argParser.add_argument("--noescaped", action='store_true')
argParser.add_argument('path')

args = argParser.parse_args()
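Going by the argparse options added above, a dump that converts overlayfs whiteouts, keeps only user.* xattrs, and ignores link counts would be invoked roughly as (the path is a placeholder):

$ ./tests/dumpdir --whiteout --userxattr --no-nlink /path/to/tree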
195 changes: 195 additions & 0 deletions tests/gendir
@@ -0,0 +1,195 @@
#!/usr/bin/python3

import argparse
import hashlib
import os
import random
import shlex
import shutil
import stat
import string
import sys

adjectives = ["adorable", "adventurous", "aggressive", "agreeable", "alert", "alive", "amused", "angry", "annoyed", "annoying", "anxious", "arrogant", "ashamed", "attractive", "average", "awful", "bad", "beautiful", "better", "bewildered", "black", "bloody", "blue", "blue-eyed", "blushing", "bored", "brainy", "brave", "breakable", "bright", "busy", "calm", "careful", "cautious", "charming", "cheerful", "clean", "clear", "clever", "cloudy", "clumsy", "colorful", "combative", "comfortable", "concerned", "condemned", "confused", "cooperative", "courageous", "crazy", "creepy", "crowded", "cruel", "curious", "cute", "dangerous", "dark", "dead", "defeated", "defiant", "delightful", "depressed", "determined", "different", "difficult", "disgusted", "distinct", "disturbed", "dizzy", "doubtful", "drab", "dull", "eager", "easy", "elated", "elegant", "embarrassed", "enchanting", "encouraging", "energetic", "enthusiastic", "envious", "evil", "excited", "expensive", "exuberant", "fair", "faithful", "famous", "fancy", "fantastic", "fierce", "filthy", "fine", "foolish", "fragile", "frail", "frantic", "friendly", "frightened", "funny", "gentle", "gifted", "glamorous", "gleaming", "glorious", "good", "gorgeous", "graceful", "grieving", "grotesque", "grumpy", "handsome", "happy", "healthy", "helpful", "helpless", "hilarious", "homeless", "homely", "horrible", "hungry", "hurt", "ill", "important", "impossible", "inexpensive", "innocent", "inquisitive", "itchy", "jealous", "jittery", "jolly", "joyous", "kind", "lazy", "light", "lively", "lonely", "long", "lovely", "lucky", "magnificent", "misty", "modern", "motionless", "muddy", "mushy", "mysterious", "nasty", "naughty", "nervous", "nice", "nutty", "obedient", "obnoxious", "odd", "old-fashioned", "open", "outrageous", "outstanding", "panicky", "perfect", "plain", "pleasant", "poised", "poor", "powerful", "precious", "prickly", "proud", "putrid", "puzzled", "quaint", "real", "relieved", "repulsive", "rich", "scary", "selfish", "shiny", "shy", "silly", "sleepy", "smiling", "smoggy", "sore", "sparkling", "splendid", "spotless", "stormy", "strange", "stupid", "successful", "super", "talented", "tame", "tasty", "tender", "tense", "terrible", "thankful", "thoughtful", "thoughtless", "tired", "tough", "troubled", "ugliest", "ugly", "uninterested", "unsightly", "unusual", "upset", "uptight", "vast", "victorious", "vivacious", "wandering", "weary", "wicked", "wide-eyed", "wild", "witty", "worried", "worrisome", "wrong", "zany", "zealous"]

nouns = ["apple", "air", "conditioner", "airport", "ambulance", "aircraft", "apartment", "arrow", "antlers", "apro", "alligator", "architect", "ankle", "armchair", "aunt", "ball", "bermudas", "beans", "balloon", "bear", "blouse", "bed", "bow", "bread", "black", "board", "bones", "bill", "bitterness", "boxers", "belt", "brain", "buffalo", "bird", "baby", "book", "back", "butter", "bulb", "buckles", "bat", "bank", "bag", "bra", "boots", "blazer", "bikini", "bookcase", "bookstore", "bus", "stop", "brass", "brother", "boy", "blender", "bucket", "bakery", "bow", "bridge", "boat", "car", "cow", "cap", "cooker", "cheeks", "cheese", "credenza", "carpet", "crow", "crest", "chest", "chair", "candy", "cabinet", "cat", "coffee", "children", "cookware", "chaise", "longue", "chicken", "casino", "cabin", "castle", "church", "cafe", "cinema", "choker", "cravat", "cane", "costume", "cardigan", "chocolate", "crib", "couch", "cello", "cashier", "composer", "cave", "country", "computer", "canoe", "clock", "charlie", "dog", "deer", "donkey", "desk", "desktop", "dress", "dolphin", "doctor", "dentist", "drum", "dresser", "designer", "detective", "daughter", "egg", "elephant", "earrings", "ears", "eyes", "estate", "finger", "fox", "frock", "frog", "fan", "freezer", "fish", "film", "foot", "flag", "factory", "father", "farm", "forest", "flower", "fruit", "fork", "grapes", "goat", "gown", "garlic", "ginger", "giraffe", "gauva", "grains", "gas", "station", "garage", "gloves", "glasses", "gift", "galaxy", "guitar", "grandmother", "grandfather", "governor", "girl", "guest", "hamburger", "hand", "head", "hair", "heart", "house", "horse", "hen", "horn", "hat", "hammer", "hostel", "hospital", "hotel", "heels", "herbs", "host", "jacket", "jersey", "jewelry", "jaw", "jumper", "judge", "juicer", "keyboard", "kid", "kangaroo", "koala", "knife", "lemon", "lion", "leggings", "leg", "laptop", "library", "lamb", "london", "lips", "lung", "lighter", "luggage", "lamp", "lawyer", "mouse", "monkey", "mouth", "mango", "mobile", "milk", "music", "mirror", "musician", "mother", "man", "model", "mall", "museum", "market", "moonlight", "medicine", "microscope", "newspaper", "nose", "notebook", "neck", "noodles", "nurse", "necklace", "noise", "ocean", "ostrich", "oil", "orange", "onion", "oven", "owl", "paper", "panda", "pants", "palm", "pasta", "pumpkin", "pharmacist", "potato", "parfume", "panther", "pad", "pencil", "pipe", "police", "pen", "pharmacy", "petrol", "station", "police", "station", "parrot", "plane", "pigeon", "phone", "peacock", "pencil", "pig", "pouch", "pagoda", "pyramid", "purse", "pancake", "popcorn", "piano", "physician", "photographer", "professor", "painter", "park", "plant", "parfume", "radio", "razor", "ribs", "rainbow", "ring", "rabbit", "rice", "refrigerator", "remote", "restaurant", "road", "surgeon", "scale", "shampoo", "sink", "salt", "shark", "sandals", "shoulder", "spoon", "soap", "sand", "sheep", "sari", "stomach", "stairs", "soup", "shoes", "scissors", "sparrow", "shirt", "suitcase", "stove", "stairs", "snowman", "shower", "swan", "suit", "sweater", "smoke", "skirt", "sofa", "socks", "stadium", "skyscraper", "school", "sunglasses", "sandals", "slippers", "shorts", "sandwich", "strawberry", "spaghetti", "shrimp", "saxophone", "sister", "son", "singer", "senator", "street", "supermarket", "swimming", "pool", "star", "sky", "sun", "spoon", "ship", "smile", "table", "turkey", "tie", "toes", "truck", "train", "taxi", "tiger", "trousers", "tongue", "television", "teacher", "turtle", "tablet", "train", "station", 
"toothpaste", "tail", "theater", "trench", "coat", "tea", "tomato", "teen", "tunnel", "temple", "town", "toothbrush", "tree", "toy", "tissue", "telephone", "underwear", "uncle", "umbrella", "vest", "voice", "veterinarian", "villa", "violin", "village", "vehicle", "vase", "wallet", "wolf", "waist", "wrist", "water", "melon", "whale", "water", "wings", "whisker", "watch", "woman", "washing", "machine", "wheelchair", "waiter", "wound", "xylophone", "zebra", "zoo"]

def with_chance(chance):
return random.random() <= chance

class Chance():
def __init__(self):
self.value = random.random()
self.start = 0

def with_chance(self, chance):
if self.start > 1:
print("Too many choices")
start = self.start
end = self.start + chance
self.start = end
return self.value >= start and self.value < end

# Choose one of the weighted options
def choice(self, options):
for value, chance in options:
if self.with_chance(chance):
return value
# Default to first
value, chance = options[0]
return value

def gen_dir_mode():
# For creation to work we want all dirs u+rwx
return random.choice([0o777, 0o755, 0o750, 0o700])

def gen_file_mode():
return random.choice([0o644, 0o666, 0o755, 0o777])

def gen_filename():
if not args.unreadable:
name = bytes(random.choice(adjectives) + "_" + random.choice(nouns) + str(random.randint(1,999)), "utf-8")
if len(name) > 255:
return gen_filename()
return name

name_len = random.randrange(1, 255)
name = [0] * name_len
for i in range(name_len):
c = random.randrange(1, 255)
while c == ord('/'):
c = random.randrange(1, 255)
name[i] = c
name=bytes(name)
if name == b'.' or name == b'..':
return gen_filename()
return name

def gen_filenames():
c = Chance()
# 5% of dirs are huge
if c.with_chance(0.05):
num_files = random.randrange(0, 4096)
else:
num_files = random.randrange(0, 25)

files = []
for i in range(num_files):
files.append(gen_filename())

return list(sorted(set(files)))

def gen_xattrname():
return random.choice(nouns) + str(random.randint(1,9))

def gen_xattrdata():
return bytes(random.choice(adjectives) + str(random.randint(1,9)), "utf-8")


def gen_hierarchy(root):
num_dirs = random.randrange(30, 50)
dirs = []
for i in range(num_dirs):
parent = random.choice([root] * 3 + dirs);
p = os.path.join(parent, gen_filename())
dirs.append(p)
# Sort and drop any (unlikely) duplicates
return list(sorted(set(dirs)))

def set_user_xattr(path):
n_xattrs = random.randrange(0, 3)
for i in range(n_xattrs):
name = "user." + gen_xattrname()
value = gen_xattrdata()
os.setxattr(path, name, value, follow_symlinks=False)

old_files = []
def make_regular_file(path):
with os.fdopen(os.open(path, os.O_WRONLY|os.O_CREAT, gen_file_mode()), 'wb') as fd:
c = Chance();
# 5% chance to reuse old file data
if len(old_files) > 0 and c.with_chance(0.05):
reused = random.choice(old_files)
with os.fdopen(os.open(reused, os.O_RDONLY), 'rb') as src:
shutil.copyfileobj(src, fd)
return

# 5% of files are large
if c.with_chance(0.05):
size = random.randrange(0, 4*1024*1024)
else: # Rest are small
size = random.randrange(0, 256)

data = random.randbytes(size)
fd.write(data)
# Save path for reuse
old_files.append(path)

set_user_xattr(path)

def make_symlink(path):
target = gen_filename()
os.symlink(target, path)

def make_node(path):
if not args.privileged:
return
target = gen_filename()
os.mknod(path, gen_file_mode() | random.choice([stat.S_IFCHR,stat.S_IFBLK]), os.makedev(0,0))

def make_whiteout(path):
if args.nowhiteout:
return
target = gen_filename()
os.mknod(path, gen_file_mode() | stat.S_IFCHR, device=os.makedev(0,0))

def make_fifo(path):
target = gen_filename()
os.mknod(path, gen_file_mode() | stat.S_IFIFO)

def make_file(path):
c = Chance();
f = c.choice([
(make_regular_file, 0.7),
(make_symlink, 0.15),
(make_fifo, 0.05),
(make_node, 0.05),
(make_whiteout, 0.05)
])
f(path)

def make_dir(path, dirs):
os.mkdir(path, mode=gen_dir_mode())
set_user_xattr(path)
files = gen_filenames()
for f in files:
child_path = os.path.join(path, f)
if child_path in dirs:
continue

func = random.choice([make_file])
func(child_path)

argParser = argparse.ArgumentParser()
argParser.add_argument("--seed")
argParser.add_argument("--unreadable", action='store_true')
argParser.add_argument("--privileged", action='store_true')
argParser.add_argument("--nowhiteout", action='store_true')
argParser.add_argument('path')

args = argParser.parse_args()

if args.seed:
seed = args.seed
else:
seed = os.urandom(16).hex()
random.seed(seed)
print(f"Using seed '{seed}'")

# Generate tree structure
root = bytes(args.path,"utf-8")
dirs = gen_hierarchy(root)

make_dir(root, dirs)
for d in dirs:
make_dir(d, dirs)
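Based on the options defined above, a reproducible random tree can be generated with a fixed seed and then inspected with dumpdir, e.g. (seed and paths are made up for the example):

$ ./tests/gendir --seed deadbeef /tmp/testtree
$ ./tests/dumpdir --no-nlink /tmp/testtree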
