about summary refs log tree commit diff
path: root/src/Routes
diff options
context:
space:
mode:
author    frosty <gabriel@bwaaa.monster>  2026-02-27 18:32:23 -0500
committer frosty <gabriel@bwaaa.monster>  2026-02-27 18:32:23 -0500
commit    9f2cd561286784fd000eb8a00f1f80db3185062c (patch)
tree      14216b6d50b34bab1c7f7ae70d628d3560613f9e /src/Routes
parent    26e3403e039d1a80f2e62f8efe889ad5f40c8cee (diff)
download  omnisearch-9f2cd561286784fd000eb8a00f1f80db3185062c.tar.gz
added proxying
Diffstat (limited to 'src/Routes')
-rw-r--r--  src/Routes/ImageProxy.c   3
-rw-r--r--  src/Routes/Images.c       5
-rw-r--r--  src/Routes/Search.c      16
3 files changed, 18 insertions, 6 deletions
diff --git a/src/Routes/ImageProxy.c b/src/Routes/ImageProxy.c
index 9dadef7..5141cd5 100644
--- a/src/Routes/ImageProxy.c
+++ b/src/Routes/ImageProxy.c
@@ -1,5 +1,5 @@
#include "ImageProxy.h"
-
+#include "../Proxy/Proxy.h"
#include <curl/curl.h>
#include <stdio.h>
#include <stdlib.h>
@@ -118,6 +118,7 @@ int image_proxy_handler(UrlParams *params) {
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &buf);
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
curl_easy_setopt(curl, CURLOPT_TIMEOUT, 10L);
+ apply_proxy_settings(curl);
CURLcode res = curl_easy_perform(curl);
diff --git a/src/Routes/Images.c b/src/Routes/Images.c
index a4770c5..e96d6fd 100644
--- a/src/Routes/Images.c
+++ b/src/Routes/Images.c
@@ -1,5 +1,7 @@
#include "Images.h"
#include "../Utility/Unescape.h"
+#include "../Proxy/Proxy.h"
+#include "../Scraping/Scraping.h"
#include <curl/curl.h>
#include <libxml/HTMLparser.h>
@@ -50,6 +52,7 @@ static char *fetch_images_html(const char *url) {
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko");
curl_easy_setopt(curl_handle, CURLOPT_FOLLOWLOCATION, 1L);
curl_easy_setopt(curl_handle, CURLOPT_TIMEOUT, 10L);
+ apply_proxy_settings(curl_handle);
CURLcode res = curl_easy_perform(curl_handle);
if (res != CURLE_OK) {
@@ -247,7 +250,7 @@ int images_handler(UrlParams *params) {
}
image_matrix[image_count] = malloc(sizeof(char *) * 4);
- image_matrix[image_count][0] = proxy_url ? proxy_url : strdup((char *)iurl);
+ image_matrix[image_count][0] = proxy_url ? strdup(proxy_url) : strdup((char *)iurl);
image_matrix[image_count][1] = strdup(title ? (char *)title : "Image");
image_matrix[image_count][2] = strdup(rurl ? (char *)rurl : "#");
image_matrix[image_count][3] = strdup(full_url ? (char *)full_url : "#");
diff --git a/src/Routes/Search.c b/src/Routes/Search.c
index 060a222..19419db 100644
--- a/src/Routes/Search.c
+++ b/src/Routes/Search.c
@@ -88,10 +88,10 @@ static int add_infobox_to_collection(InfoBox *infobox, char ****collection,
(int *)realloc(*inner_counts, sizeof(int) * (current_count + 1));
(*collection)[current_count] = (char **)malloc(sizeof(char *) * 4);
- (*collection)[current_count][0] = infobox->title;
- (*collection)[current_count][1] = infobox->thumbnail_url;
- (*collection)[current_count][2] = infobox->extract;
- (*collection)[current_count][3] = infobox->url;
+ (*collection)[current_count][0] = infobox->title ? strdup(infobox->title) : NULL;
+ (*collection)[current_count][1] = infobox->thumbnail_url ? strdup(infobox->thumbnail_url) : NULL;
+ (*collection)[current_count][2] = infobox->extract ? strdup(infobox->extract) : NULL;
+ (*collection)[current_count][3] = infobox->url ? strdup(infobox->url) : NULL;
(*inner_counts)[current_count] = 4;
return current_count + 1;
@@ -151,6 +151,10 @@ int results_handler(UrlParams *params) {
jobs[i].max_results = 10;
jobs[i].results_count = 0;
jobs[i].page = page;
+ jobs[i].handle = NULL;
+ jobs[i].response.memory = NULL;
+ jobs[i].response.size = 0;
+ jobs[i].response.capacity = 0;
}
scrape_engines_parallel(jobs, ENGINE_COUNT);
@@ -185,6 +189,10 @@ int results_handler(UrlParams *params) {
if (infobox_count > 0) {
context_set_array_of_arrays(&ctx, "infoboxes", infobox_matrix,
infobox_count, infobox_inner_counts);
+ for (int i = 0; i < infobox_count; i++) {
+ for (int j = 0; j < 4; j++) free(infobox_matrix[i][j]);
+ free(infobox_matrix[i]);
+ }
free(infobox_matrix);
free(infobox_inner_counts);
}