Bump version to v4.1.3 (#25757)

KMY 2023-07-06 23:09:32 +09:00
parent b96be29ae7
commit 82338ded9f
15 changed files with 207 additions and 33 deletions


@@ -7,11 +7,48 @@ require 'resolv'
 # Monkey-patch the HTTP.rb timeout class to avoid using a timeout block
 # around the Socket#open method, since we use our own timeout blocks inside
 # that method
+#
+# Also changes how the read timeout behaves so that it is cumulative (closer
+# to HTTP::Timeout::Global, but still having distinct timeouts for other
+# operation types)
 class HTTP::Timeout::PerOperation
   def connect(socket_class, host, port, nodelay = false)
     @socket = socket_class.open(host, port)
     @socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay
   end
+
+  # Reset the deadline when the connection is re-used for different requests
+  def reset_counter
+    @deadline = nil
+  end
+
+  # Read data from the socket
+  def readpartial(size, buffer = nil)
+    @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_timeout
+
+    timeout = false
+    loop do
+      result = @socket.read_nonblock(size, buffer, exception: false)
+
+      return :eof if result.nil?
+
+      remaining_time = @deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
+      raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout || remaining_time <= 0
+      return result if result != :wait_readable
+
+      # Mark the socket for timeout. Why is this not raised immediately?
+      # There seems to be a race condition at the network level between calling
+      # #read_nonblock and #wait_readable, in which #read_nonblock signals that
+      # it is waiting for reads, but #wait_readable returns nil before its
+      # x-second wait has elapsed. Normally that would be a timeout on wait/read,
+      # but it can also mean the socket was closed by the server. Therefore we
+      # "mark" the socket for timeout and try to read more bytes: if that returns
+      # :eof, all is well and there was no timeout; otherwise the first timeout
+      # was a real one. This hack is needed because io/wait#wait_readable can't
+      # distinguish a server-closed socket, and HTTP::Parser doesn't provide the limit for the chunks.
+      timeout = true unless @socket.to_io.wait_readable(remaining_time)
+    end
+  end
 end
 
 class Request
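For reviewers who want to see the behavioral change outside the diff, here is a minimal sketch, assuming http.rb 5.x with this patch loaded; the host name and timeout values are illustrative, not taken from the diff:

    require 'http'

    # The stock HTTP::Timeout::PerOperation restarts its read timeout on every
    # readpartial call, so a server trickling one byte every 9 seconds could
    # keep a 10-second "read timeout" connection alive indefinitely. With the
    # cumulative deadline above, the first read fixes the deadline and the
    # whole body must arrive within ~10 seconds in total.
    client = HTTP.timeout(connect: 5, write: 5, read: 10)
    client.get('https://slow.example.com/stream').to_s
    # => raises HTTP::TimeoutError ("Read timed out after 10 seconds")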

@@ -53,6 +53,26 @@ class TextFormatter
     html.html_safe # rubocop:disable Rails/OutputSafety
   end
 
+  class << self
+    include ERB::Util
+
+    def shortened_link(url, rel_me: false)
+      url = Addressable::URI.parse(url).to_s
+      rel = rel_me ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
+
+      prefix      = url.match(URL_PREFIX_REGEX).to_s
+      display_url = url[prefix.length, 30]
+      suffix      = url[prefix.length + 30..-1]
+      cutoff      = url[prefix.length..-1].length > 30
+
+      <<~HTML.squish
+        <a href="#{h(url)}" target="_blank" rel="#{rel.join(' ')}" translate="no"><span class="invisible">#{h(prefix)}</span><span class="#{cutoff ? 'ellipsis' : ''}">#{h(display_url)}</span><span class="invisible">#{h(suffix)}</span></a>
+      HTML
+    rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
+      h(url)
+    end
+  end
+
   private
 
   def rewrite
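A usage sketch of the new class-level helper (illustrative output, assuming DEFAULT_REL is the usual nofollow/noopener/noreferrer set; most of the generated markup is elided):

    TextFormatter.shortened_link('https://example.com/articles/2023/07/a-very-long-slug')
    # => '<a href="https://example.com/articles/2023/07/a-very-long-slug" target="_blank"
    #     rel="nofollow noopener noreferrer" translate="no">…</a>'

    # Unparsable input is rescued and returned HTML-escaped instead of raising.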
@@ -75,19 +95,7 @@ class TextFormatter
   end
 
   def link_to_url(entity)
-    url = Addressable::URI.parse(entity[:url]).to_s
-    rel = with_rel_me? ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
-
-    prefix      = url.match(URL_PREFIX_REGEX).to_s
-    display_url = url[prefix.length, 30]
-    suffix      = url[prefix.length + 30..-1]
-    cutoff      = url[prefix.length..-1].length > 30
-
-    <<~HTML.squish
-      <a href="#{h(url)}" target="_blank" rel="#{rel.join(' ')}"><span class="invisible">#{h(prefix)}</span><span class="#{cutoff ? 'ellipsis' : ''}">#{h(display_url)}</span><span class="invisible">#{h(suffix)}</span></a>
-    HTML
-  rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
-    h(entity[:url])
+    TextFormatter.shortened_link(entity[:url], rel_me: with_rel_me?)
   end
 
   def link_to_hashtag(entity)
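To make the 30-character slicing concrete, here is how a sample URL is split for display, assuming URL_PREFIX_REGEX matches the scheme as it does for a plain https:// URL:

    url    = 'https://example.com/articles/2023/07/a-very-long-slug'
    prefix = 'https://'                    # URL_PREFIX_REGEX match, rendered invisible
    url[prefix.length, 30]                 # => 'example.com/articles/2023/07/a' (visible part)
    url[prefix.length + 30..-1]            # => '-very-long-slug' (invisible tail)
    url[prefix.length..-1].length > 30     # => true, so the 'ellipsis' class is applied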