
Commit

properly test zip files. Fix scenes zips
sdumetz committed Dec 13, 2024
1 parent 8bb7dc3 commit 2f2b0e3
Showing 7 changed files with 194 additions and 78 deletions.
36 changes: 35 additions & 1 deletion source/server/routes/scenes/get.test.ts
@@ -7,6 +7,9 @@ import UserManager from "../../auth/UserManager.js";
import { read_cdh } from "../../utils/zip/index.js";
import { HandleMock } from "../../utils/zip/zip.test.js";
import Vfs from "../../vfs/index.js";
import path from "path";
import { tmpdir } from "os";
import { execFile } from "child_process";



@@ -50,10 +53,12 @@ describe("GET /scenes", function(){
it("can send a zip file", async function(){
let res = await request(this.server).get("/scenes")
.set("Accept", "application/zip")
.responseType('blob')
.expect(200)
.expect("Content-Type", "application/zip");

let b :any = Buffer.from(res.text, "binary");
let b :any = res.body;
expect(b).to.be.instanceof(Buffer);
expect(b).to.have.property("length").above(0);
let handle = HandleMock.Create(b);
let headers = [];
@@ -69,6 +74,35 @@ describe("GET /scenes", function(){
"scenes/foo/models/",
]);
});

it("returned zip file is valid", async function(){
await vfs.writeDoc(`{"hello": "world"}`, {scene: "foo", name: "scene.svx.json", user_id: 0});
await vfs.writeFile(dataStream(["hello world \n"]), {scene: "bar", name: "articles/hello.html", user_id: 0});

let res = await request(this.server).get("/scenes")
.set("Accept", "application/zip")
.responseType('blob')
.expect(200)
.expect("Content-Type", "application/zip");

let b :any = res.body;
expect(b).to.be.instanceof(Buffer);
expect(b).to.have.property("length").above(0);

let dir = await fs.mkdtemp(path.join(tmpdir(), "eCorpus-zip-file-test"));
try{
let file = path.join(dir, "test.zip");
await fs.writeFile(file, b);
await expect(new Promise<void>((resolve, reject)=>{
execFile("unzip", ["-t", file], (error, stdout, stderr)=>{
if(error) reject(error);
else resolve();
});
})).to.be.fulfilled;
}finally{
await fs.rm(dir, {recursive: true, force: true}).catch(()=>{});
}
})

describe("can get a list of scenes", function(){
let scenes:number[];
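
The move from `res.text` to `.responseType('blob')` in the tests above is what makes the zip assertions meaningful: superagent decodes the body into a UTF-8 string for `res.text`, which is lossy for arbitrary bytes, so rebuilding a Buffer with `Buffer.from(res.text, "binary")` yields a corrupted archive. With `.responseType('blob')` the raw bytes arrive in `res.body` as a Buffer. A minimal sketch of the same pattern, with the `/scenes` route and the express app assumed from the surrounding test file:

import request from "supertest";
import { expect } from "chai";

// Sketch: download a binary endpoint in a test without corrupting the payload.
// `server` stands in for the express app used by the suite above (assumption).
async function fetchScenesZip(server: unknown): Promise<Buffer> {
  const res = await request(server as any)
    .get("/scenes")
    .set("Accept", "application/zip")
    .responseType("blob") // keep the body as raw bytes instead of a decoded string
    .expect(200)
    .expect("Content-Type", "application/zip");

  const body: Buffer = res.body;
  expect(body).to.be.instanceof(Buffer); // res.text would have gone through a utf-8 decode
  expect(body.length).to.be.above(0);
  return body;
}
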
4 changes: 2 additions & 2 deletions source/server/routes/scenes/post.ts
@@ -51,7 +51,7 @@ export default async function postScenes(req :Request, res :Response){
if(!name){
//Create the scene
try{
console.log("create scene");
console.log("create scene: ", scene);
await vfs.createScene(scene, requester.uid);
}catch(e){
if((e as HTTPError).code != 409) throw e;
@@ -85,7 +85,7 @@ export default async function postScenes(req :Request, res :Response){

results.ok.push(`${scene}/${name}`);
}
console.log("Scenes : ", await vfs.getScene("foo"));

}).finally(() => fs.rm(tmpfile, {force: true}));

res.status(200).send(results);
166 changes: 104 additions & 62 deletions source/server/routes/scenes/scene/get.test.ts
@@ -2,7 +2,7 @@ import request from "supertest";
import { expect } from "chai";

import UserManager from "../../../auth/UserManager.js";
import { read_cdh } from "../../../utils/zip/index.js";
import { parse_file_header, read_cdh } from "../../../utils/zip/index.js";
import { HandleMock } from "../../../utils/zip/zip.test.js";
import Vfs from "../../../vfs/index.js";

@@ -38,78 +38,120 @@ describe("GET /scenes/:scene", function(){
});

describe("as application/zip", function(){
it("download a zip file", async function(){
let t = new Date("2023-05-03T13:34:26.000Z");
let t = new Date("2023-05-03T13:34:26.000Z");
this.beforeEach(async function(){
await vfs._db.run(`UPDATE files SET ctime = datetime("${t.toISOString()}")`);

});
it("download a zip file", async function(){
let res = await request(this.server).get("/scenes/foo")
.set("Accept", "application/zip")
.responseType('blob')
.expect(200)
.expect("Content-Type", "application/zip");
let b :any = Buffer.from(res.text, "binary");
let b :Buffer = res.body;
expect(Buffer.isBuffer(b)).to.be.true;
expect(b).to.have.property("length").above(0);
let handle = HandleMock.Create(b);
let headers = [];
for await(let header of read_cdh(handle)){
headers.push(header);
}
expect(headers).to.deep.equal([
{
filename: 'foo/',
crc: 0,
size: 0,
compressedSize: 0,
dosMode: 16,
unixMode: 16893,
offset: 0,
},
{
filename: 'foo/articles/',
crc: 0,
size: 0,
compressedSize: 0,
dosMode: 16,
unixMode: 16893,
offset: 50,
},
{
filename: 'foo/articles/hello-world.html',
crc: 2117232125,
size: 4,
compressedSize: 4,
dosMode: 0,
unixMode: 65021,
offset: 109,
},
{
filename: 'foo/models/',
crc: 0,
size: 0,
compressedSize: 0,
dosMode: 16,
unixMode: 16893,
offset: 253
},
{
filename: 'foo/scene.svx.json',
crc: 1107104509,
size: 2,
compressedSize: 2,
dosMode: 0,
unixMode: 65021,
offset: 253
}
].map(h =>({
...h,
extra:'', flags: 2056, mtime: new Date("2023-07-29T13:34:26.000Z"),

})));
});
describe("zip file validation", function(){
let b :Buffer;
this.beforeEach(async function(){
let res = await request(this.server).get("/scenes/foo")
.set("Accept", "application/zip")
.responseType('blob')
.expect(200)
.expect("Content-Type", "application/zip");
b = res.body;
expect(b).to.be.instanceof(Buffer);
expect(b).to.have.property("length").above(0);
});

it("can parse its own zips", async function(){
let handle = HandleMock.Create(b);
let headers_expected = [
{ filename: 'foo/', crc: 0,
size: 0, compressedSize: 0,
dosMode: 16, unixMode: 16893,
offset: 0,
},
{ filename: 'foo/articles/', crc: 0,
size: 0, compressedSize: 0,
dosMode: 16, unixMode: 16893,
offset: 50,
},
{
filename: 'foo/articles/hello-world.html', crc: 2117232040,
size: 4, compressedSize: 4,
dosMode: 0, unixMode: 65021,
offset: 109, data: "foo\n",
},
{
filename: 'foo/models/', crc: 0,
size: 0, compressedSize: 0,
dosMode: 16, unixMode: 16893,
offset: 188
},
{
filename: 'foo/scene.svx.json', crc: 2745614147,
size: 2, compressedSize: 2,
dosMode: 0, unixMode: 65021,
offset: 245, data: "{}"
}
];

let index = 0;
for await(let header of read_cdh(handle)){
let exp = headers_expected[index];
if(!exp) break;

expect(header).to.have.property("filename", exp.filename);
expect(header, `${header.filename} dosMode`).to.have.property("dosMode", exp.dosMode);
//expect(header, `${header.filename} unixMode`).to.have.property("unixMode", exp.unixMode);
expect(header, `${header.filename} extras`).to.have.property("extra", '');
expect(header, `${header.filename} flags`).to.have.property("flags", 2056);
expect(header, `${header.filename} mtime`).to.have.property("mtime").deep.equal(t);

expect(header, `${header.filename} size`).to.have.property("size", exp.size);
expect(header, `${header.filename} compressedSize`).to.have.property("compressedSize", exp.compressedSize);
expect(header, `${header.filename} offset`).to.have.property("offset", exp.offset);
let dataLength = 30 /* header length*/ + Buffer.byteLength(exp.filename) + exp.compressedSize;
let next = headers_expected[index + 1];
if(next){
expect(header.offset + dataLength + 16, `Expect ${next.filename} to be just after data of ${header.filename}`).to.equal(next.offset);
}

let data = b.slice(exp.offset, exp.offset + dataLength);

const fileHeader = parse_file_header(data.slice(0, 30 +Buffer.byteLength(exp.filename)));
expect(fileHeader).to.have.property("filename", exp.filename);
expect(fileHeader).to.have.property("mtime").deep.equal(t);
expect(fileHeader).to.have.property("flags", 2056);
expect(fileHeader).to.have.property("extra", "");


if(exp.data){
expect(data.slice(30, 30 + Buffer.byteLength(header.filename)).toString('utf-8')).to.equal(exp.filename);
expect(data.slice(-header.compressedSize).toString("utf8"), `Actual data content:${data.toString("utf-8")}`).to.equal(exp.data);
}
expect(header, `${header.filename} crc`).to.have.property("crc", exp.crc);
index++;
}

expect(index ,`Bad number of headers returned. Expected ${headers_expected.length} but index is ${index}`).to.equal(headers_expected.length);

});
})

it("can use query params to set request format", async function(){
await request(this.server).get("/scenes/foo?format=zip")
let res = await request(this.server).get("/scenes/foo?format=zip")
.responseType('blob')
.expect(200)
.expect("Content-Type", "application/zip");

let b :Buffer = res.body;
expect(Buffer.isBuffer(b)).to.be.true;

//Verify zip is valid using unzip's check mode

});

});
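
The offset bookkeeping asserted in the loop above can be checked by hand: each local entry written by `zip()` occupies a 30-byte fixed header, the UTF-8 filename, the stored data, and a 16-byte data descriptor, so every expected offset is the previous one plus that entry's size. A small sketch of the arithmetic, using the names and sizes from `headers_expected`:

// Size of one local entry: fixed header + filename + stored data + data descriptor.
const entrySize = (name: string, dataLength: number): number =>
  30 + Buffer.byteLength(name) + dataLength + 16;

let offset = 0;                                           // 'foo/'
offset += entrySize("foo/", 0);                           // 50  -> 'foo/articles/'
offset += entrySize("foo/articles/", 0);                  // 109 -> 'foo/articles/hello-world.html'
offset += entrySize("foo/articles/hello-world.html", 4);  // 188 -> 'foo/models/'
offset += entrySize("foo/models/", 0);                    // 245 -> 'foo/scene.svx.json'

These running totals match the offset values in `headers_expected`.
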
10 changes: 10 additions & 0 deletions source/server/utils/zip/crc32.test.ts
@@ -14,4 +14,14 @@ describe("crc32", function(){
crc.next(Buffer.from("\n"));
expect((crc.next().value).toString(16)).to.equal(0xaf083b2d.toString(16));
});
it("checks known values", function(){
([
["{}", 2745614147],
["foo\n", 2117232040],
] as [string, number][]).forEach(([s, sum])=>{
let crc = crc32();
crc.next(Buffer.from(s))
expect(crc.next().value, `Expected CRC32 sum for ${s} to be ${sum}`).to.equal(sum);
});
})
});
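
The two expected sums are plain CRC-32 over the file contents, the same checksum ZIP records for each entry, so they can be cross-checked against an independent implementation. A minimal bit-by-bit sketch, separate from this module's generator-based `crc32()`:

// Reference CRC-32 (reflected, polynomial 0xEDB88320, init/final XOR 0xFFFFFFFF).
function crc32Of(data: Buffer): number {
  let crc = 0xFFFFFFFF;
  for (const byte of data) {
    crc ^= byte;
    for (let i = 0; i < 8; i++) {
      crc = (crc >>> 1) ^ (0xEDB88320 & -(crc & 1));
    }
  }
  return (crc ^ 0xFFFFFFFF) >>> 0;
}

console.log(crc32Of(Buffer.from("{}")));    // 2745614147, as asserted above
console.log(crc32Of(Buffer.from("foo\n"))); // 2117232040, as asserted above
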
42 changes: 30 additions & 12 deletions source/server/utils/zip/index.ts
@@ -98,8 +98,26 @@ export function create_file_header({ filename, extra="", mtime, flags } :FileHea
return header;
}

export function create_data_descriptor({size, compressedSize=size, crc}:{size:number,compressedSize?:number, crc:number}):Buffer{
let dd = Buffer.alloc(data_descriptor_size);

export function parse_file_header(b :Buffer) :FileHeader{
const start_bytes = b.readUInt32LE(0);
assert(start_bytes === 0x04034b50, `Not a valid zip file header. expected 0x04034b50 but starting with 0x${start_bytes.toString(16)}`);
const version = b.readUInt16LE(4);
const flags = b.readUInt16LE(6);
const dosTime =b.readUInt32LE(10);
const mtime = DateTime.toUnix(dosTime);

const name_length = b.readUInt16LE(26);
const extra_length = b.readUInt16LE(28);

const filename = b.slice(30, 30+name_length).toString("utf-8");
const extra = b.slice(30+name_length, 30+name_length+extra_length).toString("utf-8");
return {filename, mtime, extra, flags};
}


export function create_data_descriptor({size, compressedSize=size, crc}: {size:number,compressedSize?:number, crc:number}):Buffer{
let dd = Buffer.allocUnsafe(data_descriptor_size);
dd.writeUInt32LE(0x08074b50, 0);
dd.writeUInt32LE(crc, 4);
dd.writeUInt32LE(compressedSize, 8) //Compressed size
@@ -111,8 +129,8 @@ export function create_cd_header({filename, mtime, extra="", dosMode, unixMode,
let name_length = Buffer.byteLength(filename);
let extra_length = Buffer.byteLength(extra);
//Construct central directory record
let cdr = Buffer.alloc(cd_header_length + name_length + extra_length);

let cdr = Buffer.allocUnsafe(cd_header_length + name_length + extra_length);
cdr.writeUInt32LE(0x02014b50, 0); // Signature
cdr.writeUInt16LE( 3 << 8 | 20, 4); // made by UNIX with zip v2.0
cdr.writeUInt16LE(20, 6); // need version 2.0 to extract
@@ -141,8 +159,9 @@ export function isDirectory(h:CDHeader):boolean{
}

export function parse_cd_header(cd :Buffer, offset :number) :CDHeader & {length:number}{
let cdh = cd.slice(offset, offset +cd_header_length);

let cdh = cd.slice(offset, offset + cd_header_length);
const signature = cd.readUInt32LE(0);
assert(signature === 0x02014b50,`Expect header to begin with 0x02014b50 but found 0x${signature.toString(16)}`)
let mtime = DateTime.toUnix(cdh.readUInt32LE(12));
let name_length = cdh.readUInt16LE(28);
let extra_length = cdh.readUInt16LE(30);
@@ -156,10 +175,10 @@ export function parse_cd_header(cd :Buffer, offset :number) :CDHeader & {length:
// compression: cdh.readUInt16LE(8),
// 12 last mod time
//14 last mod date
mtime,
crc: cdh.readUInt32LE(16),
compressedSize: cdh.readUInt32LE(20), // 20 compressed size
size: cdh.readUInt32LE(24),
mtime,
// 28 file name length
// 30 extra field length
// 32 comment length
@@ -181,7 +200,7 @@ export function parse_cd_header(cd :Buffer, offset :number) :CDHeader & {length:
*/
export async function *zip(files :AsyncIterable<ZipEntry>|Iterable<ZipEntry>, {comments = "" }={}) :AsyncGenerator<Buffer,void,unknown>{

let cd = Buffer.alloc(0);
let cd = Buffer.allocUnsafe(0);
let files_count = 0, archive_size = 0;

let flag_bits = flags.USE_DATA_DESCRIPTOR | flags.UTF_FILENAME;
@@ -200,7 +219,7 @@ export async function *zip(files :AsyncIterable<ZipEntry>|Iterable<ZipEntry>, {c
mtime = new Date("1980-01-01T0:0:0Z");
}

let local_header_offset = archive_size;
const local_header_offset = archive_size;

//File header
let header = create_file_header({filename, mtime, flags: flag_bits});
@@ -227,7 +246,7 @@ export async function *zip(files :AsyncIterable<ZipEntry>|Iterable<ZipEntry>, {c
//Construct central directory record for later use
let cdr = create_cd_header({
filename,
compressedSize:size,
compressedSize: size,
size,
crc,
flags: flag_bits,
@@ -303,13 +322,12 @@ export async function zip_read_eocd(handle :FileHandle){
*/
export async function *read_cdh(handle : FileHandle) :AsyncGenerator<CDHeader, void, void >{
let eocd = await zip_read_eocd(handle);
let cd = Buffer.alloc(eocd.cd_size);
let cd = Buffer.allocUnsafe(eocd.cd_size);
let bytes = (await handle.read({buffer:cd, position: eocd.cd_start})).bytesRead;
assert( bytes == cd.length, `Can't read Zip Central Directory Records (missing ${cd.length - bytes} of ${cd.length} bytes)`);
let offset = 0;
while(offset < eocd.cd_size){
let {length, ...header} = parse_cd_header(cd, offset);
//FIXME verify file header
yield header;
offset = offset + length;
}
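
For reference, `parse_file_header` reads the fixed 30-byte local file header defined by the ZIP application note; everything after offset 30 is the filename followed by the extra field. A standalone sketch that builds such a header by hand and parses it back (the relative import path is an assumption; only the `parse_file_header` signature shown above is relied on):

import { parse_file_header } from "./index.js"; // assumed to sit next to source/server/utils/zip/index.ts

const filename = "foo/scene.svx.json";
const name = Buffer.from(filename, "utf-8");
const header = Buffer.alloc(30 + name.length);

header.writeUInt32LE(0x04034b50, 0);    // local file header signature
header.writeUInt16LE(20, 4);            // version needed to extract (2.0)
header.writeUInt16LE(2056, 6);          // flags: data descriptor (0x08) + UTF-8 filename (0x800)
header.writeUInt16LE(0, 8);             // compression method: stored
header.writeUInt16LE(27725, 10);        // DOS time for 13:34:26
header.writeUInt16LE(22179, 12);        // DOS date for 2023-05-03
header.writeUInt32LE(0, 14);            // crc-32, deferred to the data descriptor
header.writeUInt32LE(0, 18);            // compressed size, deferred
header.writeUInt32LE(0, 22);            // uncompressed size, deferred
header.writeUInt16LE(name.length, 26);  // filename length
header.writeUInt16LE(0, 28);            // extra field length
name.copy(header, 30);

const parsed = parse_file_header(header);
// parsed.filename === "foo/scene.svx.json", parsed.flags === 2056, parsed.extra === ""
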
2 changes: 1 addition & 1 deletion source/server/vfs/Files.ts
@@ -262,7 +262,7 @@ export default abstract class FilesVfs extends BaseVfs{
if(!r.hash && !r.data) throw new NotFoundError(`Trying to open deleted file : ${ r.name }`);
if(r.hash === "directory") return r;

let handle = (typeof r.data === "string")? Readable.from([r.data]): (await this.openFile({hash: r.hash!})).createReadStream();
let handle = (typeof r.data === "string")? Readable.from([Buffer.from(r.data)]): (await this.openFile({hash: r.hash!})).createReadStream();
return {
...r,
stream: handle,
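
The one-line change above feeds the zip stream Buffers instead of strings: `Readable.from([r.data])` with a string emits string chunks, and byte-oriented consumers (size accounting, CRC) can disagree with a string's character count as soon as the document contains multi-byte UTF-8. Wrapping in `Buffer.from` makes the chunk length a byte count. A standalone illustration, not part of the commit:

import { Readable } from "stream";

async function demo(): Promise<void> {
  const doc = "héllo"; // 5 characters, 6 UTF-8 bytes

  for await (const chunk of Readable.from([doc])) {
    console.log(typeof chunk, chunk.length);            // string 5 (character count)
  }
  for await (const chunk of Readable.from([Buffer.from(doc)])) {
    console.log(Buffer.isBuffer(chunk), chunk.length);  // true 6 (byte count)
  }
}
demo();
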
