Fix a compile error when SSL is disabled.

master
Chunting Gu 6 years ago
parent 8c09189b14
commit 889024d6c7

@@ -51,7 +51,8 @@ CompactNamespaces: false
 ConstructorInitializerAllOnOneLineOrOnePerLine: true
 ConstructorInitializerIndentWidth: 4
 ContinuationIndentWidth: 4
-Cpp11BracedListStyle: true
+# Cpp11BracedListStyle: true
+Cpp11BracedListStyle: false
 DerivePointerAlignment: false
 DisableFormat: false
 ExperimentalAutoDetectBinPacking: false
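
(Note on the clang-format change above: with Cpp11BracedListStyle set to false, clang-format pads braced init lists with inner spaces instead of formatting them like function-call argument lists. A hypothetical one-liner, not taken verbatim from this repository, formatted under each setting:)

    // Cpp11BracedListStyle: true   (old setting)
    socket_.reset(new Socket{io_context_});
    // Cpp11BracedListStyle: false  (new setting)
    socket_.reset(new Socket{ io_context_ });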

@@ -59,7 +59,7 @@ int main() {
 #endif  // WEBCC_ENABLE_SSL
   } catch (const webcc::Error& error) {
-    std::cout << error << std::endl;
+    std::cerr << error << std::endl;
   }
   return 0;
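
(The example hunk above only redirects error output: a thrown webcc::Error is now printed to std::cerr rather than std::cout. A trimmed sketch of the surrounding main(); the request code is elided since it is not part of this diff, and the webcc header name is an assumption:)

    #include <iostream>
    #include "webcc/client_session.h"  // header name assumed

    int main() {
      try {
        // ... issue an HTTP request; the HTTPS part is guarded by WEBCC_ENABLE_SSL ...
    #if WEBCC_ENABLE_SSL
        // ... issue an HTTPS request ...
    #endif  // WEBCC_ENABLE_SSL
      } catch (const webcc::Error& error) {
        std::cerr << error << std::endl;  // was std::cout before this commit
      }
      return 0;
    }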

@@ -78,7 +78,7 @@ void Client::Connect(RequestPtr request) {
     DoConnect(request, "443");
 #else
     LOG_ERRO("SSL/HTTPS support is not enabled.");
-    return kSyntaxError;
+    error_.Set(Error::kSyntaxError, "SSL/HTTPS is not supported");
 #endif  // WEBCC_ENABLE_SSL
   } else {
     socket_.reset(new Socket{ io_context_ });
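
(This hunk is the compile fix the commit message refers to: the hunk header shows that Client::Connect returns void, so the old `return kSyntaxError;` in the #else branch, which is only compiled when WEBCC_ENABLE_SSL is off, is ill-formed. The new code records the failure on the client via its error_ member instead. A minimal, self-contained illustration of the principle, not the webcc source:)

    // Returning a value from a function declared void does not compile; that is
    // the error this commit fixes for the non-SSL build. All names below are
    // stand-ins for illustration only.
    enum ErrorCode { kOK = 0, kSyntaxError };

    struct Error {
      ErrorCode code = kOK;
      const char* message = "";
      void Set(ErrorCode c, const char* m) { code = c; message = m; }
    };

    Error error_;  // stands in for the Client's error_ member

    void Connect() {
      // return kSyntaxError;  // would not compile: 'return' with a value in a
      //                       // function returning void
      error_.Set(kSyntaxError, "SSL/HTTPS is not supported");  // the fix: record it
    }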

@@ -47,8 +47,9 @@ std::string FromExtension(const std::string& extension,
 // -----------------------------------------------------------------------------
 std::ostream& operator<<(std::ostream& os, const Error& error) {
   os << "ERROR(";
   os << std::to_string(static_cast<int>(error.code()));
-  os << ": " << error.message();
+  os << "): " << error.message();
   if (error.timeout()) {
     os << " (timeout)";
   }
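
(The operator<< tweak only closes the parenthesis opened before the error code, so the printed form becomes `ERROR(<code>): <message>`, with an optional ` (timeout)` suffix. A usage sketch; the default construction of Error, the header name, and the numeric value of the code are assumptions, while Error::Set and the stream operator appear in the hunks above:)

    #include <iostream>
    #include "webcc/globals.h"  // header for webcc::Error; name assumed

    int main() {
      webcc::Error error;  // default construction assumed to be available
      error.Set(webcc::Error::kSyntaxError, "SSL/HTTPS is not supported");
      std::cerr << error << std::endl;
      // Before this commit: ERROR(<code>: SSL/HTTPS is not supported
      // After this commit:  ERROR(<code>): SSL/HTTPS is not supported
      return 0;
    }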

@@ -97,6 +97,7 @@ static const bool g_terminal_has_color = []() {
   return strcmp(term, "cygwin") == 0 || strcmp(term, "linux") == 0 ||
          strcmp(term, "rxvt-unicode-256color") == 0 ||
          strcmp(term, "screen") == 0 || strcmp(term, "screen-256color") == 0 ||
+         strcmp(term, "screen.xterm-256color") == 0 ||
          strcmp(term, "tmux-256color") == 0 || strcmp(term, "xterm") == 0 ||
          strcmp(term, "xterm-256color") == 0 ||
          strcmp(term, "xterm-termite") == 0 || strcmp(term, "xterm-color") == 0;
