@@ -24,6 +24,7 @@
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */

+#include <AK/Debug.h>
 #include <LibCore/Gzip.h>
 #include <LibCore/TCPSocket.h>
 #include <LibHTTP/HttpResponse.h>
@@ -31,24 +32,18 @@
 #include <stdio.h>
 #include <unistd.h>

-//#define JOB_DEBUG
-
 namespace HTTP {

 static ByteBuffer handle_content_encoding(const ByteBuffer& buf, const String& content_encoding)
 {
-#ifdef JOB_DEBUG
-    dbg() << "Job::handle_content_encoding: buf has content_encoding = " << content_encoding;
-#endif
+    dbgln<debug_job>("Job::handle_content_encoding: buf has content_encoding={}", content_encoding);

     if (content_encoding == "gzip") {
         if (!Core::Gzip::is_compressed(buf)) {
             dbgln("Job::handle_content_encoding: buf is not gzip compressed!");
         }

-#ifdef JOB_DEBUG
-        dbgln("Job::handle_content_encoding: buf is gzip compressed!");
-#endif
+        dbgln<debug_job>("Job::handle_content_encoding: buf is gzip compressed!");

         auto uncompressed = Core::Gzip::decompress(buf);
         if (!uncompressed.has_value()) {
@@ -56,11 +51,11 @@ static ByteBuffer handle_content_encoding(const ByteBuffer& buf, const String& c
             return buf;
         }

-#ifdef JOB_DEBUG
-        dbg() << "Job::handle_content_encoding: Gzip::decompress() successful.\n"
-              << "  Input size = " << buf.size() << "\n"
-              << "  Output size = " << uncompressed.value().size();
-#endif
+        if constexpr (debug_job) {
+            dbgln("Job::handle_content_encoding: Gzip::decompress() successful.");
+            dbgln("  Input size: {}", buf.size());
+            dbgln("  Output size: {}", uncompressed.value().size());
+        }

         return uncompressed.value();
     }
@@ -82,9 +77,7 @@ void Job::flush_received_buffers()
 {
     if (!m_can_stream_response || m_buffered_size == 0)
         return;
-#ifdef JOB_DEBUG
-    dbg() << "Job: Flushing received buffers: have " << m_buffered_size << " bytes in " << m_received_buffers.size() << " buffers";
-#endif
+    dbgln<debug_job>("Job: Flushing received buffers: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
     for (size_t i = 0; i < m_received_buffers.size(); ++i) {
         auto& payload = m_received_buffers[i];
         auto written = do_write(payload);
@@ -97,14 +90,9 @@ void Job::flush_received_buffers()
         }
         ASSERT(written < payload.size());
         payload = payload.slice(written, payload.size() - written);
-#ifdef JOB_DEBUG
-        dbg() << "Job: Flushing received buffers done: have " << m_buffered_size << " bytes in " << m_received_buffers.size() << " buffers";
-#endif
-        return;
+        break;
     }
-#ifdef JOB_DEBUG
-    dbg() << "Job: Flushing received buffers done: have " << m_buffered_size << " bytes in " << m_received_buffers.size() << " buffers";
-#endif
+    dbgln<debug_job>("Job: Flushing received buffers done: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
 }

 void Job::on_socket_connected()
@@ -114,10 +102,12 @@ void Job::on_socket_connected()
             return;
         m_sent_data = true;
         auto raw_request = m_request.to_raw_request();
-#ifdef JOB_DEBUG
-        dbgln("Job: raw_request:");
-        dbg() << String::copy(raw_request).characters();
-#endif
+
+        if constexpr (debug_job) {
+            dbgln("Job: raw_request:");
+            dbgln("{}", String::copy(raw_request));
+        }
+
         bool success = write(raw_request);
         if (!success)
             deferred_invoke([this](auto&) { did_fail(Core::NetworkJob::Error::TransmissionFailed); });
@@ -208,14 +198,10 @@ void Job::on_socket_connected()
             m_headers.set(name, value);
             if (name.equals_ignoring_case("Content-Encoding")) {
                 // Assume that any content-encoding means that we can't decode it as a stream :(
-#ifdef JOB_DEBUG
-                dbg() << "Content-Encoding " << value << " detected, cannot stream output :(";
-#endif
+                dbgln<debug_job>("Content-Encoding {} detected, cannot stream output :(", value);
                 m_can_stream_response = false;
             }
-#ifdef JOB_DEBUG
-            dbg() << "Job: [" << name << "] = '" << value << "'";
-#endif
+            dbgln<debug_job>("Job: [{}] = '{}'", name, value);
             return;
         }
         ASSERT(m_state == State::InBody);
@@ -230,9 +216,7 @@ void Job::on_socket_connected()
                 // read size
                 auto size_data = read_line(PAGE_SIZE);
                 auto size_lines = size_data.view().lines();
-#ifdef JOB_DEBUG
-                dbg() << "Job: Received a chunk with size _" << size_data << "_";
-#endif
+                dbgln<debug_job>("Job: Received a chunk with size '{}'", size_data);
                 if (size_lines.size() == 0) {
                     dbgln("Job: Reached end of stream");
                     finish_up();
@@ -254,36 +238,32 @@ void Job::on_socket_connected()
                         read_size = 0;
                         m_current_chunk_total_size = 0;
                         m_current_chunk_remaining_size = 0;
-#ifdef JOB_DEBUG
-                        dbg() << "Job: Received the last chunk with extensions _" << size_string.substring_view(1, size_string.length() - 1) << "_";
-#endif
+
+                        dbgln<debug_job>("Job: Received the last chunk with extensions '{}'", size_string.substring_view(1, size_string.length() - 1));
                     } else {
                         m_current_chunk_total_size = size;
                         m_current_chunk_remaining_size = size;
                         read_size = size;
-#ifdef JOB_DEBUG
-                        dbg() << "Job: Chunk of size _" << size << "_ started";
-#endif
+
+                        dbgln<debug_job>("Job: Chunk of size '{}' started", size);
                     }
                 }
             } else {
                 read_size = remaining;
-#ifdef JOB_DEBUG
-                dbg() << "Job: Resuming chunk with _" << remaining << "_ bytes left over";
-#endif
+
+                dbgln<debug_job>("Job: Resuming chunk with '{}' bytes left over", remaining);
             }
         } else {
             auto transfer_encoding = m_headers.get("Transfer-Encoding");
             if (transfer_encoding.has_value()) {
                 auto encoding = transfer_encoding.value();
-#ifdef JOB_DEBUG
-                dbg() << "Job: This content has transfer encoding '" << encoding << "'";
-#endif
+
+                dbgln<debug_job>("Job: This content has transfer encoding '{}'", encoding);
                 if (encoding.equals_ignoring_case("chunked")) {
                     m_current_chunk_remaining_size = -1;
                     goto read_chunk_size;
                 } else {
-                    dbg() << "Job: Unknown transfer encoding _" << encoding << "_, the result will likely be wrong!";
+                    dbgln("Job: Unknown transfer encoding '{}', the result will likely be wrong!", encoding);
                 }
             }
         }
@@ -308,13 +288,10 @@ void Job::on_socket_connected()

         if (m_current_chunk_remaining_size.has_value()) {
             auto size = m_current_chunk_remaining_size.value() - payload.size();
-#ifdef JOB_DEBUG
-            dbg() << "Job: We have " << size << " bytes left over in this chunk";
-#endif
+
+            dbgln<debug_job>("Job: We have {} bytes left over in this chunk", size);
             if (size == 0) {
-#ifdef JOB_DEBUG
-                dbg() << "Job: Finished a chunk of " << m_current_chunk_total_size.value() << " bytes";
-#endif
+                dbgln<debug_job>("Job: Finished a chunk of {} bytes", m_current_chunk_total_size.value());

                 if (m_current_chunk_total_size.value() == 0) {
                     m_state = State::Trailers;
@@ -323,10 +300,9 @@ void Job::on_socket_connected()

                 // we've read everything, now let's get the next chunk
                 size = -1;
-                [[maybe_unused]] auto line = read_line(PAGE_SIZE);
-#ifdef JOB_DEBUG
-                dbg() << "Line following (should be empty): _" << line << "_";
-#endif
+
+                if constexpr (debug_job)
+                    dbgln("Line following (should be empty): '{}'", read_line(PAGE_SIZE));
             }
             m_current_chunk_remaining_size = size;
         }
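Note: the pattern this change migrates to keeps every debug statement compiling in all configurations and relies on a compile-time bool (via a template parameter on dbgln, or an if constexpr block) to drop the output when the flag is off. A minimal standalone sketch of that idea follows; debug_log and the hard-coded debug_job value are illustrative stand-ins, not the actual AK/Debug.h or AK/Format.h definitions.

// Sketch only: stand-ins for what AK/Debug.h and AK/Format.h provide.
#include <cstdio>
#include <utility>

// Hypothetical per-subsystem flag; in the real tree a constant like this is
// made available through AK/Debug.h based on the build configuration.
constexpr bool debug_job = true;

// Hypothetical stand-in for dbgln<flag>(): when the flag is false, the call
// is discarded at compile time, so disabled debug output costs nothing at runtime.
template<bool enabled, typename... Args>
void debug_log(const char* format, Args&&... args)
{
    if constexpr (enabled)
        std::fprintf(stderr, format, std::forward<Args>(args)...);
}

int main()
{
    debug_log<debug_job>("Job: received %zu bytes\n", static_cast<std::size_t>(1024));
    return 0;
}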