diff --git a/meson.build b/meson.build
index 9b639bbc..077a5528 100644
--- a/meson.build
+++ b/meson.build
@@ -41,6 +41,8 @@ deps = []
 if host_machine.system() == 'darwin'
   add_project_arguments('-x', 'objective-c', language: 'c')
   fworks = [
+    'CoreFoundation',
+    'CFNetwork',
   ]
   foreach fkit : fworks
     deps += dependency('appleframeworks', modules: fkit)
@@ -57,27 +59,12 @@ if host_machine.system() == 'linux'
 endif
 
 if host_machine.system() == 'windows'
-  deps += cc.find_library('d3d11')
-  deps += cc.find_library('ws2_32', required:true)
-  deps += cc.find_library('dbghelp')
-  deps += cc.find_library('winmm')
-  deps += cc.find_library('setupapi')
-  deps += cc.find_library('imm32')
-  deps += cc.find_library('version')
-  deps += cc.find_library('cfgmgr32')
   deps += cc.find_library('bcrypt')
+  deps += cc.find_library('winhttp')
   link += ['-static', '-static-libgcc', '-static-libstdc++']
   add_project_link_arguments('-static-libgcc', '-static-libstdc++', language: ['c', 'cpp'])
 endif
 
-# Try to find system-installed curl first
-curl_dep = dependency('libcurl', static: true, required: false)
-if not curl_dep.found()
-  message('⚙ System curl not found, building subproject...')
-  deps += dependency('libcurl', static:true)
-else
-  deps += curl_dep
-endif
 
 if host_machine.system() != 'emscripten'
   # Try to find system-installed enet first
@@ -117,7 +104,7 @@ if host_machine.system() != 'emscripten'
   endif
 endif
 
-link += '-rdynamic'
+#link += '-rdynamic'
 
 link_args = link
 sources = []
diff --git a/scripts/fd.c b/scripts/fd.c
index 0243e4ba..fda54344 100644
--- a/scripts/fd.c
+++ b/scripts/fd.c
@@ -684,10 +684,17 @@ JSC_CCALL(fd_is_link,
   const char *path = JS_ToCString(js, argv[0]);
   if (!path) return JS_EXCEPTION;
 
+#ifdef _WIN32
+  DWORD attrs = GetFileAttributesA(path);
+  JS_FreeCString(js, path);
+  int is_link = (attrs != INVALID_FILE_ATTRIBUTES && (attrs & FILE_ATTRIBUTE_REPARSE_POINT));
+  return JS_NewBool(js, is_link);
+#else
   struct stat st;
   int is_link = (lstat(path, &st) == 0 && S_ISLNK(st.st_mode));
   JS_FreeCString(js, path);
   return JS_NewBool(js, is_link);
+#endif
 )
 
 JSC_CCALL(fd_readlink,
diff --git a/scripts/http.c b/scripts/http.c
index ff152210..cfa1a4c3 100644
--- a/scripts/http.c
+++ b/scripts/http.c
@@ -1,6 +1,291 @@
 #include "cell.h"
+
+
+#if defined(_WIN32)
+#include <windows.h>
+#include <winhttp.h>
+#include <stdlib.h>
+
+typedef unsigned char par_byte;
+
+static void par_easycurl_init(unsigned int flags) {
+  (void)flags;
+}
+
+static int par_easycurl_to_memory(char const *url, par_byte **data, int *nbytes) {
+  if (!url || !data || !nbytes) return 0;
+  *data = NULL;
+  *nbytes = 0;
+
+  int success = 0;
+  HINTERNET hSession = NULL, hConnect = NULL, hRequest = NULL;
+  wchar_t *wUrl = NULL, *wHost = NULL, *wPath = NULL;
+  char *buffer = NULL;
+  int bufferSize = 0;
+
+  int len = MultiByteToWideChar(CP_UTF8, 0, url, -1, NULL, 0);
+  if (len <= 0) goto cleanup;
+  wUrl = (wchar_t *)malloc(len * sizeof(wchar_t));
+  if (!wUrl) goto cleanup;
+  MultiByteToWideChar(CP_UTF8, 0, url, -1, wUrl, len);
+
+  URL_COMPONENTS urlComp;
+  ZeroMemory(&urlComp, sizeof(urlComp));
+  urlComp.dwStructSize = sizeof(urlComp);
+  urlComp.dwHostNameLength = (DWORD)-1;
+  urlComp.dwUrlPathLength = (DWORD)-1;
+
+  if (!WinHttpCrackUrl(wUrl, (DWORD)wcslen(wUrl), 0, &urlComp)) goto cleanup;
+
+  wHost = (wchar_t *)malloc((urlComp.dwHostNameLength + 1) * sizeof(wchar_t));
+  if (!wHost) goto cleanup;
+  wcsncpy(wHost, urlComp.lpszHostName, urlComp.dwHostNameLength);
+  wHost[urlComp.dwHostNameLength] = L'\0';
+
+  wPath = (wchar_t *)malloc((urlComp.dwUrlPathLength + 1) * sizeof(wchar_t));
+  if (!wPath) goto cleanup;
+  wcsncpy(wPath, urlComp.lpszUrlPath, urlComp.dwUrlPathLength);
+  wPath[urlComp.dwUrlPathLength] = L'\0';
+
+  hSession = WinHttpOpen(L"cell/1.0", WINHTTP_ACCESS_TYPE_DEFAULT_PROXY,
+                         WINHTTP_NO_PROXY_NAME, WINHTTP_NO_PROXY_BYPASS, 0);
+  if (!hSession) goto cleanup;
+
+  hConnect = WinHttpConnect(hSession, wHost, urlComp.nPort, 0);
+  if (!hConnect) goto cleanup;
+
+  DWORD dwFlags = (urlComp.nScheme == INTERNET_SCHEME_HTTPS) ? WINHTTP_FLAG_SECURE : 0;
+
+  hRequest = WinHttpOpenRequest(hConnect, L"GET", wPath, NULL,
+                                WINHTTP_NO_REFERER, WINHTTP_DEFAULT_ACCEPT_TYPES, dwFlags);
+  if (!hRequest) goto cleanup;
+
+  if (!WinHttpSendRequest(hRequest, WINHTTP_NO_ADDITIONAL_HEADERS, 0,
+                          WINHTTP_NO_REQUEST_DATA, 0, 0, 0)) goto cleanup;
+
+  if (!WinHttpReceiveResponse(hRequest, NULL)) goto cleanup;
+
+  DWORD dwStatusCode = 0;
+  DWORD dwSize = sizeof(dwStatusCode);
+  if (!WinHttpQueryHeaders(hRequest, WINHTTP_QUERY_STATUS_CODE | WINHTTP_QUERY_FLAG_NUMBER,
+                           WINHTTP_HEADER_NAME_BY_INDEX, &dwStatusCode, &dwSize, WINHTTP_NO_HEADER_INDEX)) {
+    goto cleanup;
+  }
+
+  if (dwStatusCode >= 400) goto cleanup;
+
+  buffer = (char *)malloc(1);
+  if (!buffer) goto cleanup;
+  buffer[0] = 0;
+
+  for (;;) {
+    DWORD dwAvailable = 0;
+    if (!WinHttpQueryDataAvailable(hRequest, &dwAvailable)) goto cleanup;
+    if (dwAvailable == 0) break;
+
+    char *newBuffer = (char *)realloc(buffer, bufferSize + dwAvailable + 1);
+    if (!newBuffer) goto cleanup;
+    buffer = newBuffer;
+
+    DWORD dwDownloaded = 0;
+    if (!WinHttpReadData(hRequest, buffer + bufferSize, dwAvailable, &dwDownloaded)) goto cleanup;
+    bufferSize += dwDownloaded;
+    buffer[bufferSize] = 0;
+  }
+
+  *data = (par_byte *)buffer;
+  *nbytes = bufferSize;
+  success = 1;
+  buffer = NULL;
+
+cleanup:
+  if (wUrl) free(wUrl);
+  if (wHost) free(wHost);
+  if (wPath) free(wPath);
+  if (buffer) free(buffer);
+  if (hRequest) WinHttpCloseHandle(hRequest);
+  if (hConnect) WinHttpCloseHandle(hConnect);
+  if (hSession) WinHttpCloseHandle(hSession);
+  return success;
+}
+
+#elif defined(__EMSCRIPTEN__)
+#include <emscripten/fetch.h>
+#include <stdlib.h>
+#include <string.h>
+
+typedef unsigned char par_byte;
+
+static void par_easycurl_init(unsigned int flags) {
+  (void)flags;
+}
+
+typedef struct {
+  par_byte *data;
+  int nbytes;
+  int complete;
+  int success;
+} fetch_result;
+
+static void on_fetch_success(emscripten_fetch_t *fetch) {
+  fetch_result *res = (fetch_result *)fetch->userData;
+  res->nbytes = fetch->numBytes;
+  res->data = (par_byte *)malloc((size_t)res->nbytes + 1);
+  if (!res->data) {
+    res->success = 0;
+  } else {
+    memcpy(res->data, fetch->data, (size_t)res->nbytes);
+    res->data[res->nbytes] = 0;
+    res->success = 1;
+  }
+  res->complete = 1;
+  emscripten_fetch_close(fetch);
+}
+
+static void on_fetch_failure(emscripten_fetch_t *fetch) {
+  fetch_result *res = (fetch_result *)fetch->userData;
+  res->success = 0;
+  res->complete = 1;
+  emscripten_fetch_close(fetch);
+}
+
+static int par_easycurl_to_memory(char const *url, par_byte **data, int *nbytes) {
+  if (!url || !data || !nbytes) return 0;
+  *data = NULL;
+  *nbytes = 0;
+
+  fetch_result res = {0};
+  emscripten_fetch_attr_t attr;
+  emscripten_fetch_attr_init(&attr);
+  strcpy(attr.requestMethod, "GET");
+  attr.attributes = EMSCRIPTEN_FETCH_LOAD_TO_MEMORY;
+  attr.userData = &res;
+  attr.onsuccess = on_fetch_success;
+  attr.onerror = on_fetch_failure;
+  attr.attributes |= EMSCRIPTEN_FETCH_SYNCHRONOUS;
+
+  emscripten_fetch_t *fetch = emscripten_fetch(&attr, url);
+  if (!fetch) return 0;
+
+  if (!res.complete || !res.success) return 0;
+
+  *data = res.data;
+  *nbytes = res.nbytes;
+  return 1;
+}
+
+#elif defined(__APPLE__)
+#include <CoreFoundation/CoreFoundation.h>
+#include <CFNetwork/CFNetwork.h>
+#include <stdlib.h>
+#include <string.h>
+
+typedef unsigned char par_byte;
+
+static void par_easycurl_init(unsigned int flags) {
+  (void)flags;
+}
+
+typedef struct {
+  CFMutableDataRef data;
+  int success;
+  int complete;
+} cf_result;
+
+static void cf_response_callback(CFReadStreamRef stream, CFStreamEventType type, void *clientCallBackInfo) {
+  cf_result *res = (cf_result *)clientCallBackInfo;
+  if (type == kCFStreamEventHasBytesAvailable) {
+    UInt8 buffer[4096];
+    CFIndex bytesRead = CFReadStreamRead(stream, buffer, sizeof(buffer));
+    if (bytesRead > 0) {
+      CFDataAppendBytes(res->data, buffer, bytesRead);
+    }
+  } else if (type == kCFStreamEventEndEncountered) {
+    res->success = 1;
+    res->complete = 1;
+    CFReadStreamClose(stream);
+  } else if (type == kCFStreamEventErrorOccurred) {
+    res->success = 0;
+    res->complete = 1;
+    CFReadStreamClose(stream);
+  }
+}
+
+static int par_easycurl_to_memory(char const *url, par_byte **data, int *nbytes) {
+  if (!url || !data || !nbytes) return 0;
+  *data = NULL;
+  *nbytes = 0;
+
+  CFStringRef cfurl = CFStringCreateWithCString(NULL, url, kCFStringEncodingUTF8);
+  if (!cfurl) return 0;
+  CFURLRef cfurlRef = CFURLCreateWithString(NULL, cfurl, NULL);
+  CFRelease(cfurl);
+  if (!cfurlRef) return 0;
+
+  CFHTTPMessageRef request = CFHTTPMessageCreateRequest(NULL, CFSTR("GET"), cfurlRef, kCFHTTPVersion1_1);
+  CFRelease(cfurlRef);
+  if (!request) return 0;
+
+  CFReadStreamRef stream = CFReadStreamCreateForHTTPRequest(NULL, request);
+  CFRelease(request);
+  if (!stream) return 0;
+
+  cf_result res;
+  res.data = CFDataCreateMutable(NULL, 0);
+  res.success = 0;
+  res.complete = 0;
+  if (!res.data) {
+    CFRelease(stream);
+    return 0;
+  }
+
+  CFStreamClientContext ctx = {0, &res, NULL, NULL, NULL};
+  CFOptionFlags events = kCFStreamEventHasBytesAvailable | kCFStreamEventErrorOccurred | kCFStreamEventEndEncountered;
+  if (!CFReadStreamSetClient(stream, events, cf_response_callback, &ctx)) {
+    CFRelease(stream);
+    CFRelease(res.data);
+    return 0;
+  }
+  CFReadStreamScheduleWithRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode);
+  if (!CFReadStreamOpen(stream)) {
+    CFReadStreamUnscheduleFromRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode);
+    CFRelease(stream);
+    CFRelease(res.data);
+    return 0;
+  }
+
+  while (!res.complete) {
+    CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.01, false);
+  }
+
+  CFReadStreamUnscheduleFromRunLoop(stream, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode);
+  CFRelease(stream);
+
+  if (!res.success) {
+    CFRelease(res.data);
+    return 0;
+  }
+
+  CFIndex len = CFDataGetLength(res.data);
+  par_byte *bytes = (par_byte *)malloc((size_t)len + 1);
+  if (!bytes) {
+    CFRelease(res.data);
+    return 0;
+  }
+  CFDataGetBytes(res.data, CFRangeMake(0, len), bytes);
+  bytes[len] = 0;
+  CFRelease(res.data);
+
+  *data = bytes;
+  *nbytes = (int)len;
+  return 1;
+}
+
+#else
 #define PAR_EASYCURL_IMPLEMENTATION
 #include "par_easycurl.h"
+#endif
+
 #include
 #include
 #include
@@ -34,7 +319,7 @@ static const JSCFunctionListEntry js_http_funcs[] = {
 };
 
 JSValue js_http_use(JSContext *js) {
-  par_easycurl_init(0); // Initialize curl
+  par_easycurl_init(0); // Initialize platform HTTP backend
 
   JSValue obj = JS_NewObject(js);
   JS_SetPropertyFunctionList(js, obj, js_http_funcs, sizeof(js_http_funcs)/sizeof(js_http_funcs[0]));
diff --git a/scripts/os.c b/scripts/os.c
index 10a45ddd..994e72c7 100644
--- a/scripts/os.c
+++ b/scripts/os.c
@@ -8,12 +8,12 @@
 #include
 #include
 #include
-#include
 
 #ifdef _WIN32
 #include
 #include
 #else
+#include
 #include
 #include
 #include
@@ -46,7 +46,11 @@ static JSClassDef js_dylib_class = {
   .finalizer = js_dylib_finalizer,
 };
 
+#ifndef _WIN32
+#ifdef __APPLE__
 #include
+#endif
+#endif
 
 uint64_t cell_ns()
 {
diff --git a/scripts/shop.cm b/scripts/shop.cm
index 073e4b97..c28f2c97 100644
--- a/scripts/shop.cm
+++ b/scripts/shop.cm
@@ -527,86 +527,6 @@ Shop.get_module_dir = function(alias) {
   return '.cell/modules/' + parsed.path
 }
 
-// Install a dependency
-Shop.install = function(alias) {
-  var config = Shop.load_config()
-  if (!config || !config.dependencies || !config.dependencies[alias]) {
-    log.error("Dependency not found in config: " + alias)
-    return false
-  }
-
-  var pkg = config.dependencies[alias]
-  var parsed = Shop.parse_package(pkg)
-  var target_dir = '.cell/modules/' + parsed.path
-
-  log.console("Installing " + alias + " (" + pkg + ")...")
-
-  // 1. Get Commit Hash
-  var api_url = Shop.get_api_url(pkg)
-  var commit_hash = null
-  if (api_url) {
-    try {
-      log.console("Fetching info from " + api_url)
-      var resp = http.fetch(api_url)
-      var resp_text = text(resp)
-      commit_hash = Shop.extract_commit_hash(pkg, resp_text)
-      log.console("Resolved commit: " + commit_hash)
-    } catch (e) {
-      log.console("Warning: Failed to fetch API info: " + e)
-    }
-  }
-
-  // 2. Download Zip
-  var download_url = Shop.get_download_url(pkg)
-  if (!download_url) {
-    log.error("Could not determine download URL for " + pkg)
-    return false
-  }
-
-  log.console("Downloading from " + download_url)
-  var zip_blob
-  try {
-    zip_blob = http.fetch(download_url)
-  } catch (e) {
-    log.error("Download failed: " + e)
-    return false
-  }
-
-  // 3. Unpack
-  log.console("Unpacking to " + target_dir)
-  ensure_dir(target_dir)
-
-  var zip = miniz.read(zip_blob)
-  if (!zip) {
-    log.error("Failed to read zip archive")
-    return false
-  }
-
-  var count = zip.count()
-  for (var i = 0; i < count; i++) {
-    if (zip.is_dir(i)) continue
-
-    var filename = zip.get_filename(i)
-    // Strip top-level directory
-    var parts = filename.split('/')
-    if (parts.length > 1) {
-      parts.shift() // Remove root folder
-      var rel_path = parts.join('/')
-
-      var full_path = target_dir + '/' + rel_path
-      var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
-      ensure_dir(dir_path)
-
-      var content = zip.slurp(filename)
-      fd.slurpwrite(full_path, content)
-    }
-  }
-
-  // 4. Update Lock (only for root package)
-  log.console("Installed " + alias)
-  return { commit: commit_hash, package: pkg }
-}
-
 function lock_package(loc) {
   var lock = Shop.load_lock()
 
@@ -623,7 +543,6 @@ Shop.check_cache = function(pkg) {
     return true
   }
 
-
   return false
 }
 
@@ -1165,12 +1084,13 @@ function install_zip(zip_blob, target_dir) {
   if (!zip) throw new Error("Failed to read zip archive")
 
   if (fd.is_link(target_dir)) fd.unlink(target_dir)
-  else if (fd.is_dir(target_dir)) rm_recursive(target_dir)
 
-  log.console("Unpacking to " + target_dir)
+  log.console("Syncing to " + target_dir)
   ensure_dir(target_dir)
 
+  var zip_files = {}
   var count = zip.count()
+
   for (var i = 0; i < count; i++) {
     if (zip.is_directory(i)) continue
     var filename = zip.get_filename(i)
@@ -1179,12 +1099,44 @@ function install_zip(zip_blob, target_dir) {
     parts.shift()
     var rel_path = parts.join('/')
 
+    zip_files[rel_path] = { index: i, filename: filename }
+  }
+
+  var existing_files = fd.is_dir(target_dir) ? get_all_files(target_dir) : []
+
+  for (var i = 0; i < existing_files.length; i++) {
+    var rel_path = existing_files[i]
+    if (!zip_files[rel_path]) {
+      var full_path = target_dir + '/' + rel_path
+      log.console("Removing " + rel_path)
+      fd.rm(full_path)
+    }
+  }
+
+  for (var rel_path in zip_files) {
+    var zip_info = zip_files[rel_path]
     var full_path = target_dir + '/' + rel_path
     var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
-    ensure_dir(dir_path)
 
-    var content = zip.slurp(filename)
-    fd.slurpwrite(full_path, content)
+    var zip_content = zip.slurp(zip_info.filename)
+    var needs_write = true
+
+    if (fd.is_file(full_path)) {
+      var disk_content = fd.slurp(full_path)
+      if (disk_content.length == zip_content.length && disk_content.length != 0) {
+        var hash_zip = text(crypto.blake2(zip_content), 'h')
+        var hash_disk = text(crypto.blake2(disk_content), 'h')
+        if (hash_zip == hash_disk) {
+          needs_write = false
+        }
+      }
+    }
+
+    if (needs_write) {
+      ensure_dir(dir_path)
+      log.console("Writing " + rel_path)
+      fd.slurpwrite(full_path, zip_content)
+    }
   }
 }
diff --git a/source/scheduler_threaded.c b/source/scheduler_threaded.c
index 586a5ec9..0e9f47cc 100644
--- a/source/scheduler_threaded.c
+++ b/source/scheduler_threaded.c
@@ -11,6 +11,10 @@
 #include "cell.h"
 #include "cell_internal.h"
 
+#ifdef _WIN32
+#include <windows.h>
+#endif
+
 typedef struct actor_node {
   cell_rt *actor;
   struct actor_node *next;
@@ -234,7 +238,13 @@ void actor_initialize(void) {
   pthread_create(&engine.timer_thread, NULL, timer_thread_func, NULL);
 
   // Start Workers
+#ifdef _WIN32
+  SYSTEM_INFO sysinfo;
+  GetSystemInfo(&sysinfo);
+  long n = sysinfo.dwNumberOfProcessors;
+#else
   long n = sysconf(_SC_NPROCESSORS_ONLN);
+#endif
   engine.num_workers = (int)n;
   engine.worker_threads = malloc(sizeof(pthread_t) * n);
   for (int i=0; i < n; i++) {
diff --git a/subprojects/curl.wrap b/subprojects/curl.wrap
deleted file mode 100644
index f7e384b8..00000000
--- a/subprojects/curl.wrap
+++ /dev/null
@@ -1,13 +0,0 @@
-[wrap-file]
-directory = curl-8.10.1
-source_url = https://github.com/curl/curl/releases/download/curl-8_10_1/curl-8.10.1.tar.xz
-source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/curl_8.10.1-1/curl-8.10.1.tar.xz
-source_filename = curl-8.10.1.tar.xz
-source_hash = 73a4b0e99596a09fa5924a4fb7e4b995a85fda0d18a2c02ab9cf134bebce04ee
-patch_filename = curl_8.10.1-1_patch.zip
-patch_url = https://wrapdb.mesonbuild.com/v2/curl_8.10.1-1/get_patch
-patch_hash = 707c28f35fc9b0e8d68c0c2800712007612f922a31da9637ce706a2159f3ddd8
-wrapdb_version = 8.10.1-1
-
-[provide]
-dependency_names = libcurl
diff --git a/subprojects/openssl.wrap b/subprojects/openssl.wrap
deleted file mode 100644
index 873d5510..00000000
--- a/subprojects/openssl.wrap
+++ /dev/null
@@ -1,15 +0,0 @@
-[wrap-file]
-directory = openssl-3.0.8
-source_url = https://www.openssl.org/source/openssl-3.0.8.tar.gz
-source_filename = openssl-3.0.8.tar.gz
-source_hash = 6c13d2bf38fdf31eac3ce2a347073673f5d63263398f1f69d0df4a41253e4b3e
-patch_filename = openssl_3.0.8-3_patch.zip
-patch_url = https://wrapdb.mesonbuild.com/v2/openssl_3.0.8-3/get_patch
-patch_hash = 300da189e106942347d61a4a4295aa2edbcf06184f8d13b4cee0bed9fb936963
-source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/openssl_3.0.8-3/openssl-3.0.8.tar.gz
-wrapdb_version = 3.0.8-3
-
-[provide]
-libcrypto = libcrypto_dep
-libssl = libssl_dep
-openssl = openssl_dep
diff --git a/subprojects/zlib.wrap b/subprojects/zlib.wrap
deleted file mode 100644
index aa14de17..00000000
--- a/subprojects/zlib.wrap
+++ /dev/null
@@ -1,13 +0,0 @@
-[wrap-file]
-directory = zlib-1.3.1
-source_url = http://zlib.net/fossils/zlib-1.3.1.tar.gz
-source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/zlib_1.3.1-1/zlib-1.3.1.tar.gz
-source_filename = zlib-1.3.1.tar.gz
-source_hash = 9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23
-patch_filename = zlib_1.3.1-1_patch.zip
-patch_url = https://wrapdb.mesonbuild.com/v2/zlib_1.3.1-1/get_patch
-patch_hash = e79b98eb24a75392009cec6f99ca5cdca9881ff20bfa174e8b8926d5c7a47095
-wrapdb_version = 1.3.1-1
-
-[provide]
-zlib = zlib_dep