From 50b5c1ed3de75d0fcc24ae3a858736cee3eabe58 Mon Sep 17 00:00:00 2001
From: Fufu Fang
Date: Tue, 24 Jul 2018 02:51:34 +0100
Subject: [PATCH] now uses curl-multi interface, it is so much faster

---
 Makefile  |  2 +-
 README.md |  5 ++---
 network.c | 14 +++++++++-----
 3 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/Makefile b/Makefile
index 28015d0..b1ce11a 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,6 @@
 CC=gcc
 CFLAGS= -g -Wall -Wextra -lgumbo -lcurl -lfuse -D_FILE_OFFSET_BITS=64 \
--DHTTPDIRFS_INFO -DHTTPDIRFS_SINGLE
+-DHTTPDIRFS_INFO
 OBJ = main.o network.o
 
 %.o: %.c
diff --git a/README.md b/README.md
index b2b919f..fc57da0 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,8 @@
 Have you ever wanted to mount those HTTP directory listings as if it was a partition? Look no further, this is your solution. HTTPDirFS stands for Hyper Text Transfer Protocol Directory Filesystem
 
+The performance of the program is excellent, due to the use of the curl-multi interface. HTTP connections are reused.
+
 ## Usage
     ./httpdirfs -f $URL $YOUR_MOUNT_POINT
 An example URL would be [Debian CD Image Server](https://cdimage.debian.org/debian-cd/). The ``-f`` flag keeps the program in the foreground, which is useful for monitoring which URL the filesystem is visiting.
 
@@ -10,9 +12,6 @@ I noticed that most HTTP directory listings don't provide the file size for the
 
 This program downloads the HTML web pages/files using [libcurl](https://curl.haxx.se/libcurl/), then parses the listing pages using [Gumbo](https://github.com/google/gumbo-parser), and presents them using [libfuse](https://github.com/libfuse/libfuse)
 
-## Notes
-You might not want to use your actual GUI file browser when using this filesystem, KDE Dolphin caches the browser that you point your mouse at, it really does slow things down a lot!
-
 ## LICENSE
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
diff --git a/network.c b/network.c
index 6fa07fa..14e220c 100644
--- a/network.c
+++ b/network.c
@@ -296,9 +296,9 @@ static Link *Link_new(const char *p_url, LinkType type)
 
 static CURL *Link_to_curl(Link *link)
 {
-    #ifdef HTTPDIRFS_INFO
+#ifdef HTTPDIRFS_INFO
     fprintf(stderr, "Link_to_curl(%s);\n", link->f_url);
-    #endif
+#endif
 
     CURL *curl = curl_easy_init();
     if (!curl) {
@@ -341,13 +341,14 @@ long Link_download(const char *path, char *output_buf, size_t size,
     buf.size = 0;
     buf.memory = NULL;
 
-    CURL *curl = Link_to_curl(link);
-    curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *)&buf);
-    curl_easy_setopt(curl, CURLOPT_RANGE, range_str);
 #ifdef HTTPDIRFS_INFO
     fprintf(stderr, "Link_download(%s, %p, %s);\n",
             path, output_buf, range_str);
 #endif
+    CURL *curl = Link_to_curl(link);
+    curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *)&buf);
+    curl_easy_setopt(curl, CURLOPT_RANGE, range_str);
+
     transfer_wrapper(curl);
 
     long http_resp;
@@ -374,6 +375,9 @@ long Link_download(const char *path, char *output_buf, size_t size,
 
 LinkTable *LinkTable_new(const char *url)
 {
+#ifdef HTTPDIRFS_INFO
+    fprintf(stderr, "LinkTable_new(%s);\n", url);
+#endif
     LinkTable *linktbl = calloc(1, sizeof(LinkTable));
     if (!linktbl) {
         fprintf(stderr, "LinkTable_new(): calloc failure!\n");
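
Note: this diff mainly reorders the debug output, drops -DHTTPDIRFS_SINGLE from
CFLAGS, and keeps calling the pre-existing transfer_wrapper() helper; the
curl-multi machinery itself is not shown in the patch. As a rough sketch only,
and not the actual code in network.c, a transfer_wrapper() driven by the
curl-multi interface could look like the following. The file-scope "curl_multi"
handle and its lazy initialisation are assumptions for illustration. Connection
reuse comes from the multi handle's shared connection cache, which is what the
README note about reused HTTP connections refers to.

    /* Hypothetical sketch, not the real network.c implementation. */
    #include <stdio.h>
    #include <curl/curl.h>

    static CURLM *curl_multi;   /* assumed: one shared multi handle */

    void transfer_wrapper(CURL *curl)
    {
        if (!curl_multi) {
            /* the real code would presumably do this once at start-up,
             * after curl_global_init(CURL_GLOBAL_ALL) */
            curl_multi = curl_multi_init();
        }

        curl_multi_add_handle(curl_multi, curl);

        int still_running = 0;
        do {
            CURLMcode mc = curl_multi_perform(curl_multi, &still_running);
            if (mc == CURLM_OK && still_running) {
                /* wait for socket activity, or give up after 1 second */
                mc = curl_multi_wait(curl_multi, NULL, 0, 1000, NULL);
            }
            if (mc != CURLM_OK) {
                fprintf(stderr, "transfer_wrapper(): %s\n",
                        curl_multi_strerror(mc));
                break;
            }
        } while (still_running);

        curl_multi_remove_handle(curl_multi, curl);
    }

Driving every transfer through the same multi handle keeps the call site
synchronous (Link_download() still blocks until the transfer finishes) while
letting libcurl keep HTTP connections open between calls.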