Compare commits

...

4 Commits

Author SHA1 Message Date
Archilocos 2cb03c711b Merge f361527459 into f48ee93931 2024-02-02 10:20:05 +00:00
Fufu Fang f48ee93931 Update README.md 2024-02-01 09:58:05 +00:00
Fufu Fang 983b1edfbd Updated README 2024-02-01 06:28:36 +00:00
liuchenghao f361527459 fix: When the network is abnormal during the file download, start to resume the transfer 2021-09-22 09:59:57 +08:00
4 changed files with 88 additions and 20 deletions

README.md

@@ -1,3 +1,8 @@
+ [![CodeQL](https://github.com/fangfufu/httpdirfs/actions/workflows/codeql.yml/badge.svg)](https://github.com/fangfufu/httpdirfs/actions/workflows/codeql.yml)
+ [![CodeFactor](https://www.codefactor.io/repository/github/fangfufu/httpdirfs/badge)](https://www.codefactor.io/repository/github/fangfufu/httpdirfs)
+ [![Codacy Badge](https://app.codacy.com/project/badge/Grade/30af0a5b4d6f4a4d83ddb68f5193ad23)](https://app.codacy.com/gh/fangfufu/httpdirfs/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade)
+ [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=fangfufu_httpdirfs&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=fangfufu_httpdirfs)
# HTTPDirFS - HTTP Directory Filesystem with a permanent cache, and Airsonic / Subsonic server support!
Have you ever wanted to mount those HTTP directory listings as if it was a
@@ -22,9 +27,9 @@ present a HTTP directory listing.
## Installation
Please note that if you install HTTPDirFS from a repository, the packaged version may be outdated.
### Debian 11 "Bullseye"
HTTPDirFS is available as a package in Debian 11 "Bullseye", If you are on
Debian Bullseye, you can simply run the following
### Debian 12 "Bookworm"
HTTPDirFS is available as a package in Debian 12 "Bookworm", If you are on
Debian Bookworm, you can simply run the following
command as ``root``:
apt install httpdirfs
@@ -42,14 +47,14 @@ HTTPDirFS is available in the
## Compilation
### Ubuntu
- Under Ubuntu 18.04.4 LTS, you need the following packages:
+ Under Ubuntu 22.04 LTS, you need the following packages:
- libgumbo-dev libfuse-dev libssl-dev libcurl4-openssl-dev uuid-dev
+ libgumbo-dev libfuse-dev libssl-dev libcurl4-openssl-dev uuid-dev help2man
- ### Debian 11 "Bullseye" and Debian 10 "Buster"
- Under Debian 10 "Buster" and newer versions, you need the following packages:
+ ### Debian 12 "Bookworm"
+ Under Debian 12 "Bookworm" and newer versions, you need the following packages:
- libgumbo-dev libfuse-dev libssl-dev libcurl4-openssl-dev uuid-dev
+ libgumbo-dev libfuse-dev libssl-dev libcurl4-openssl-dev uuid-dev help2man
### FreeBSD
The following dependencies are required from either pkg or ports:
@@ -300,6 +305,8 @@ for the technical and moral support. Your wisdom is much appreciated!
compatibility patches.
- I would like to thank [hiliev](https://github.com/hiliev) for providing macOS
compatibility patches.
+ - I would like to thank [Jonathan Kamens](https://github.com/jikamens) for providing
+   many code improvements and an improved build system.
- I would like to thank [-Archivist](https://www.reddit.com/user/-Archivist/)
for not providing FTP or WebDAV access to his server. This piece of software was
written in direct response to his appalling behaviour.

src/link.c

@@ -237,11 +237,13 @@ static void LinkTable_uninitialised_fill(LinkTable *linktbl)
}
/*
* Block until the gaps are filled
+ * The result value is not inspected here
*/
- int n = curl_multi_perform_once();
+ int result = 0;
+ int n = curl_multi_perform_once(&result);
int i = 0;
int j = 0;
- while ((i = curl_multi_perform_once())) {
+ while ((i = curl_multi_perform_once(&result))) {
if (CONFIG.log_type & debug) {
if (j) {
erase_string(stderr, STATUS_LEN, s);
@@ -949,7 +951,7 @@ TransferStruct Link_download_full(Link *link)
*/
long http_resp = 0;
do {
- transfer_blocking(curl);
+ transfer_blocking(curl, ts.curr_size);
ret = curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &http_resp);
if (ret) {
lprintf(error, "%s", curl_easy_strerror(ret));
@@ -1075,7 +1077,7 @@ long Link_download(Link *link, char *output_buf, size_t req_size, off_t offset)
CURL *curl = Link_download_curl_setup(link, req_size, offset, &header, &ts);
- transfer_blocking(curl);
+ transfer_blocking(curl, offset);
curl_off_t recv = Link_download_cleanup(curl, &header);

src/network.c

@@ -10,6 +10,7 @@
#include <string.h>
#include <stdio.h>
#include <unistd.h>
+ #include <stdbool.h>
/*
* ----------------- External variables ----------------------
@@ -110,9 +111,10 @@ curl_callback_unlock(CURL *handle, curl_lock_data data, void *userptr)
* \details Adapted from:
* https://curl.haxx.se/libcurl/c/10-at-a-time.html
*/
- static void
+ static int
curl_process_msgs(CURLMsg *curl_msg, int n_running_curl, int n_mesgs)
{
+ int result = 0;
(void) n_running_curl;
(void) n_mesgs;
static volatile int slept = 0;
@@ -163,6 +165,7 @@ curl_process_msgs(CURLMsg *curl_msg, int n_running_curl, int n_mesgs)
lprintf(error, "%d - %s <%s>\n",
curl_msg->data.result,
curl_easy_strerror(curl_msg->data.result), url);
+ result = curl_msg->data.result;
}
curl_multi_remove_handle(curl_multi, curl);
/*
@@ -175,13 +178,50 @@ curl_process_msgs(CURLMsg *curl_msg, int n_running_curl, int n_mesgs)
} else {
lprintf(warning, "curl_msg->msg: %d\n", curl_msg->msg);
}
+ return result;
}
+ static int http_error_result(int http_response)
+ {
+     switch (http_response) {
+     case 0:   /* e.g. connection went down at kick-off; worth retrying up to some limit */
+     case 200: /* OK */
+     case 206: /* Partial Content */
+         break;
+     case 416:
+         /*
+          * Cannot download the requested range: either the server does not
+          * support ranges, or the range is invalid because the file has
+          * already been downloaded in full.
+          */
+         printf("HTTP 416: either the download is already complete or the HTTP server cannot serve a range\n");
+         /* fall through */
+     default:
+         return 0; /* suggest quitting on an unhandled status */
+     }
+     return 1;
+ }

+ static int curl_error_result(int curl_result)
+ {
+     switch (curl_result) {
+     case CURLE_OK:
+     case CURLE_COULDNT_CONNECT:      /* no network connectivity? */
+     case CURLE_OPERATION_TIMEDOUT:   /* because of CURLOPT_LOW_SPEED_TIME */
+     case CURLE_COULDNT_RESOLVE_HOST: /* host / DNS down? */
+         break;                       /* keep trying */
+     default:
+         /* see http://curl.haxx.se/libcurl/c/libcurl-errors.html */
+         return 0;
+     }
+     return 1;
+ }
/**
* \details effectively based on
* https://curl.haxx.se/libcurl/c/multi-double.html
*/
- int curl_multi_perform_once(void)
+ int curl_multi_perform_once(int *result)
{
lprintf(network_lock_debug,
"thread %x: locking transfer_lock;\n", pthread_self());
@@ -207,7 +247,12 @@ int curl_multi_perform_once(void)
int n_mesgs;
CURLMsg *curl_msg;
while ((curl_msg = curl_multi_info_read(curl_multi, &n_mesgs))) {
- curl_process_msgs(curl_msg, n_running_curl, n_mesgs);
+     /* Fetch the HTTP status before the message is processed (the easy
+      * handle may be cleaned up inside curl_process_msgs). */
+     long http_resp = 0;
+     curl_easy_getinfo(curl_msg->easy_handle, CURLINFO_RESPONSE_CODE, &http_resp);
+     int nResult = curl_process_msgs(curl_msg, n_running_curl, n_mesgs);
+     if (!http_error_result(http_resp) || !curl_error_result(nResult)) {
+         *result = 1;
+     } else {
+         *result = 0;
+     }
}
lprintf(network_lock_debug,
@@ -272,7 +317,7 @@ void NetworkSystem_init(void)
crypto_lock_init();
}
- void transfer_blocking(CURL *curl)
+ void transfer_blocking(CURL *curl, size_t start)
{
TransferStruct *ts;
CURLcode ret = curl_easy_getinfo(curl, CURLINFO_PRIVATE, &ts);
@@ -293,8 +338,22 @@ void transfer_blocking(CURL *curl)
"thread %x: unlocking transfer_lock;\n", pthread_self());
PTHREAD_MUTEX_UNLOCK(&transfer_lock);
- while (ts->transferring) {
-     curl_multi_perform_once();
+ int result = 0;
+ while (ts->transferring) {
+     /*
+      * If the previous cycle reported a network problem, re-queue the easy
+      * handle and resume the transfer from the caller-supplied offset.
+      */
+     if (0 != result) {
+         curl_multi_remove_handle(curl_multi, curl);
+         curl_easy_setopt(curl, CURLOPT_RESUME_FROM_LARGE, (curl_off_t) start);
+         CURLMcode res = curl_multi_add_handle(curl_multi, curl);
+         if (res != CURLM_OK) {
+             lprintf(error, "%d, %s\n", res, curl_multi_strerror(res));
+         }
+     }
+     curl_multi_perform_once(&result);
}
}
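The retry classification above treats `CURLE_COULDNT_CONNECT`, `CURLE_OPERATION_TIMEDOUT` (the error raised when `CURLOPT_LOW_SPEED_TIME` trips) and `CURLE_COULDNT_RESOLVE_HOST` as transient. For reference, a stall watchdog of that kind is typically configured on the easy handle roughly as sketched below. This is an illustrative sketch, not code from this patch or from httpdirfs, and the timeout values are arbitrary.

```c
#include <curl/curl.h>

/*
 * Illustrative sketch (not part of the patch): configure an easy handle so
 * that a dead or stalled connection surfaces as CURLE_COULDNT_CONNECT or
 * CURLE_OPERATION_TIMEDOUT, the errors curl_error_result() treats as
 * worth retrying. The thresholds below are arbitrary examples.
 */
static void set_stall_watchdog(CURL *curl)
{
    curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10L);    /* give up connecting after 10 s   */
    curl_easy_setopt(curl, CURLOPT_LOW_SPEED_LIMIT, 1024L); /* abort if slower than 1 KiB/s... */
    curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, 30L);    /* ...for 30 consecutive seconds   */
}
```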

src/network.h

@@ -26,13 +26,13 @@ typedef enum {
extern CURLSH *CURL_SHARE;
/** \brief perform one transfer cycle */
- int curl_multi_perform_once(void);
+ int curl_multi_perform_once(int *result);
/** \brief initialise the network module */
void NetworkSystem_init(void);
/** \brief blocking file transfer */
- void transfer_blocking(CURL *curl);
+ void transfer_blocking(CURL *curl, size_t start);
/** \brief non blocking file transfer */
void transfer_nonblocking(CURL *curl);
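Taken together, the revised interface drives the same resume behaviour that libcurl exposes through `CURLOPT_RESUME_FROM_LARGE`. The standalone sketch below shows that pattern in isolation with the easy interface, so the intent of the patch is easier to follow. It is not httpdirfs code: the URL, output file name and retry count are placeholders, and a robust version would also check that the server answered 206 Partial Content before trusting the appended data.

```c
#include <stdio.h>
#include <curl/curl.h>

/* Append whatever arrives to the (possibly partial) output file. */
static size_t append_to_file(char *ptr, size_t size, size_t nmemb, void *userdata)
{
    return fwrite(ptr, size, nmemb, (FILE *) userdata);
}

int main(void)
{
    const char *url = "https://example.com/large-file.bin"; /* placeholder URL */
    FILE *out = fopen("large-file.bin", "ab");              /* append mode keeps partial data */
    if (!out) {
        return 1;
    }

    curl_global_init(CURL_GLOBAL_DEFAULT);
    CURL *curl = curl_easy_init();
    if (!curl) {
        fclose(out);
        return 1;
    }
    curl_easy_setopt(curl, CURLOPT_URL, url);
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, append_to_file);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, out);

    CURLcode res;
    int attempts_left = 5;
    do {
        /* Ask the server to continue from the bytes already on disk. */
        fseek(out, 0, SEEK_END);
        curl_easy_setopt(curl, CURLOPT_RESUME_FROM_LARGE, (curl_off_t) ftell(out));
        res = curl_easy_perform(curl);
    } while (res != CURLE_OK && --attempts_left > 0);

    curl_easy_cleanup(curl);
    curl_global_cleanup();
    fclose(out);
    return res == CURLE_OK ? 0 : 1;
}
```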