Add tool for lwip httpd server (#1600)
It would be helpful to be able to use the lwip httpd server, but lwip
generates the embedded content with a tool written in C. This is
problematic because it requires a native compiler to build that tool.

Add a Python tool to generate the httpd content and a CMake function to
make use of it.
peterharperuk authored Jan 16, 2024
1 parent 8353cb6 commit 62bb486
Showing 3 changed files with 193 additions and 0 deletions.
1 change: 1 addition & 0 deletions src/rp2_common/pico_lwip/CMakeLists.txt
@@ -302,5 +302,6 @@ if (EXISTS ${PICO_LWIP_PATH}/${LWIP_TEST_PATH})
pico_lwip_contrib_freertos
pico_rand)

pico_add_subdirectory(tools)
pico_promote_common_scope_vars()
endif()
20 changes: 20 additions & 0 deletions src/rp2_common/pico_lwip/tools/CMakeLists.txt
@@ -0,0 +1,20 @@
# Compile the http content into a source file "pico_fsdata.inc" in a format suitable for the lwip httpd server
# Pass the target library name, the library type, and the list of httpd content files
function(pico_set_lwip_httpd_content TARGET_LIB TARGET_TYPE)
    find_package(Python3 REQUIRED COMPONENTS Interpreter)
    set(HTTPD_CONTENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/generated")
    set(HTTPD_CONTENT_TARGET "${TARGET_LIB}_pico_set_lwip_httpd_content")
    set(HTTPD_CONTENT_OUTPUT_NAME "pico_fsdata.inc")
    set(HTTPD_CONTENT_TOOL "${PICO_SDK_PATH}/src/rp2_common/pico_lwip/tools/makefsdata.py")
    add_custom_target(${HTTPD_CONTENT_TARGET} DEPENDS ${HTTPD_CONTENT_BINARY_DIR}/${HTTPD_CONTENT_OUTPUT_NAME})
    add_custom_command(
        OUTPUT ${HTTPD_CONTENT_BINARY_DIR}/${HTTPD_CONTENT_OUTPUT_NAME}
        DEPENDS ${HTTPD_CONTENT_TOOL} ${ARGN}
        COMMAND ${CMAKE_COMMAND} -E make_directory ${HTTPD_CONTENT_BINARY_DIR} &&
            ${Python3_EXECUTABLE} ${HTTPD_CONTENT_TOOL} -i ${ARGN} -o ${HTTPD_CONTENT_BINARY_DIR}/${HTTPD_CONTENT_OUTPUT_NAME}
        VERBATIM)
    target_include_directories(${TARGET_LIB} ${TARGET_TYPE}
        ${HTTPD_CONTENT_BINARY_DIR}
    )
    add_dependencies(${TARGET_LIB} ${HTTPD_CONTENT_TARGET})
endfunction()
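
An application would typically call this from its own CMakeLists.txt, along the lines of pico_set_lwip_httpd_content(my_httpd_content INTERFACE ${CMAKE_CURRENT_LIST_DIR}/content/index.shtml ${CMAKE_CURRENT_LIST_DIR}/content/404.html), where the target name and content files are hypothetical. Under the hood the custom command amounts to a single run of the new script; the Python sketch below mirrors that invocation, with placeholder paths standing in for the values CMake substitutes at build time.

# Editorial sketch, not part of the commit: equivalent stand-alone invocation of makefsdata.py.
# All paths here are placeholders.
import subprocess
from pathlib import Path

tool = Path("pico-sdk/src/rp2_common/pico_lwip/tools/makefsdata.py")  # assumed SDK checkout location
content = ["content/index.shtml", "content/404.html"]                 # hypothetical httpd content files
out_dir = Path("build/generated")
out_dir.mkdir(parents=True, exist_ok=True)  # mirrors the make_directory step in the custom command

subprocess.run(
    ["python3", str(tool), "-i", *content, "-o", str(out_dir / "pico_fsdata.inc")],
    check=True,
)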
172 changes: 172 additions & 0 deletions src/rp2_common/pico_lwip/tools/makefsdata.py
@@ -0,0 +1,172 @@
#!/usr/bin/env python3
import argparse
from pathlib import Path

file_types = {
    "html": "text/html",
    "htm": "text/html",
    "shtml": "text/html",
    "shtm": "text/html",
    "ssi": "text/html",
    "gif": "image/gif",
    "png": "image/png",
    "jpg": "image/jpeg",
    "bmp": "image/bmp",
    "ico": "image/x-icon",
    "class": "application/octet-stream",
    "cls": "application/octet-stream",
    "js": "application/javascript",
    "ram": "application/javascript",
    "css": "text/css",
    "swf": "application/x-shockwave-flash",
    "xml": "text/xml",
    "xsl": "text/xml",
    "pdf": "application/pdf",
    "json": "application/json",
    "svg": "image/svg+xml"
}

response_types = {
    200: "HTTP/1.0 200 OK",
    400: "HTTP/1.0 400 Bad Request",
    404: "HTTP/1.0 404 File not found",
    501: "HTTP/1.0 501 Not Implemented",
}

PAYLOAD_ALIGNMENT = 4
HTTPD_SERVER_AGENT = "lwIP/2.2.0d (http://savannah.nongnu.org/projects/lwip)"
LWIP_HTTPD_SSI_EXTENSIONS = [".shtml", ".shtm", ".ssi", ".xml", ".json"]
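
For illustration (an editorial sketch, not part of the file): PAYLOAD_ALIGNMENT controls how process_file pads the stored file name, so that the HTTP header and body that follow it start on an aligned boundary.

# Sketch of the padding rule used below, assuming the constants defined above.
name = "/index.html"      # hypothetical content file name as stored in the image
data = name + "\x00"      # the name is NUL-terminated first
while len(data) % PAYLOAD_ALIGNMENT != 0:
    data += "\x00"        # then padded with NULs up to the alignment boundary
assert len(data) % PAYLOAD_ALIGNMENT == 0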

def process_file(input_dir, file):
    results = []

    # Check the content type, keyed on the file extension without its leading dot
    content_type = file_types.get(file.suffix[1:].lower())
    if content_type is None:
        raise RuntimeError(f"Unsupported file type {file.suffix}")

    # File name, NUL-terminated and padded to PAYLOAD_ALIGNMENT
    data = f"/{file.relative_to(input_dir)}\x00"
    comment = f"\"/{file.relative_to(input_dir)}\" ({len(data)} chars)"
    while len(data) % PAYLOAD_ALIGNMENT != 0:
        data += "\x00"
    results.append({'data': bytes(data, "utf-8"), 'comment': comment})

    # Status line: picked from a numeric file name prefix (e.g. "404.html"), default 200
    response_type = 200
    for response_id in response_types:
        if file.name.startswith(f"{response_id}."):
            response_type = response_id
            break
    data = f"{response_types[response_type]}\r\n"
    comment = f"\"{response_types[response_type]}\" ({len(data)} chars)"
    results.append({'data': bytes(data, "utf-8"), 'comment': comment})

    # Server (user agent) header
    data = f"Server: {HTTPD_SERVER_AGENT}\r\n"
    comment = f"\"Server: {HTTPD_SERVER_AGENT}\" ({len(data)} chars)"
    results.append({'data': bytes(data, "utf-8"), 'comment': comment})

    if file.suffix not in LWIP_HTTPD_SSI_EXTENSIONS:
        # Content length, only emitted for non-SSI files
        file_size = file.stat().st_size
        data = f"Content-Length: {file_size}\r\n"
        comment = f"\"Content-Length: {file_size}\" ({len(data)} chars)"
        results.append({'data': bytes(data, "utf-8"), 'comment': comment})

    # Content type, followed by the blank line that ends the header
    data = f"Content-Type: {content_type}\r\n\r\n"
    comment = f"\"Content-Type: {content_type}\" ({len(data)} chars)"
    results.append({'data': bytes(data, "utf-8"), 'comment': comment})

    # File contents
    data = file.read_bytes()
    comment = f"raw file data ({len(data)} bytes)"
    results.append({'data': data, 'comment': comment})

    return results
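
As a usage sketch (an editorial example with a hypothetical file, not part of the committed script): calling process_file yields one commented byte block per piece of the embedded response, i.e. the padded name, the status line, the Server header, Content-Length and Content-Type for non-SSI files, and finally the raw file contents.

# "fs/index.html" is hypothetical and must exist on disk for this to run.
for entry in process_file(Path("fs"), Path("fs/index.html")):
    print(entry['comment'], len(entry['data']), "bytes")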

def process_file_list(fd, input):
    data = []
    fd.write("#include \"lwip/apps/fs.h\"\n")
    fd.write("\n")
    # generate the page contents
    input_dir = None
    for name in input:
        file = Path(name)
        if not file.is_file():
            raise RuntimeError(f"File not found: {name}")
        # Take the input directory from the first file
        if input_dir is None:
            input_dir = file.parent
        results = process_file(input_dir, file)

        # make a variable name
        var_name = str(file.relative_to(input_dir))
        var_name = var_name.replace(".", "_")
        var_name = var_name.replace("/", "_")
        data_var = f"data_{var_name}"
        file_var = f"file_{var_name}"

        # variable containing the raw data
        fd.write(f"static const unsigned char {data_var}[] = {{\n")
        for entry in results:
            fd.write(f"\n /* {entry['comment']} */\n")
            byte_count = 0
            for b in entry['data']:
                if byte_count % 16 == 0:
                    fd.write(" ")
                byte_count += 1
                fd.write(f"0x{b:02x},")
                if byte_count % 16 == 0:
                    fd.write("\n")
            if byte_count % 16 != 0:
                fd.write("\n")
        fd.write(f"}};\n\n")

        # set the flags
        flags = "FS_FILE_FLAGS_HEADER_INCLUDED"
        if file.suffix not in LWIP_HTTPD_SSI_EXTENSIONS:
            flags += " | FS_FILE_FLAGS_HEADER_PERSISTENT"
        else:
            flags += " | FS_FILE_FLAGS_SSI"

        # add variable details to the list
        data.append({'data_var': data_var, 'file_var': file_var, 'name_size': len(results[0]['data']), 'flags': flags})

    # generate the page details
    last_var = "NULL"
    for entry in data:
        fd.write(f"const struct fsdata_file {entry['file_var']}[] = {{{{\n")
        fd.write(f" {last_var},\n")
        fd.write(f" {entry['data_var']},\n")
        fd.write(f" {entry['data_var']} + {entry['name_size']},\n")
        fd.write(f" sizeof({entry['data_var']}) - {entry['name_size']},\n")
        fd.write(f" {entry['flags']},\n")
        fd.write(f"}}}};\n\n")
        last_var = entry['file_var']
    fd.write(f"#define FS_ROOT {last_var}\n")
    fd.write(f"#define FS_NUMFILES {len(data)}\n")

def run_tool():
    parser = argparse.ArgumentParser(prog="makefsdata.py", description="Generates a source file for the lwip httpd server")
    parser.add_argument(
        "-i",
        "--input",
        help="input files to add as http content",
        required=True,
        nargs='+'
    )
    parser.add_argument(
        "-o",
        "--output",
        help="name of the source file to generate",
        required=True,
    )
    args = parser.parse_args()
    print(args.input)
    with open(args.output, "w") as fd:
        process_file_list(fd, args.input)

if __name__ == "__main__":
    run_tool()
