Fix various clippy warnings

Sebastian Dröge 2019-07-04 18:30:26 +03:00
parent 2a7bcca7eb
commit b5b7e91c49
11 changed files with 45 additions and 57 deletions


@@ -305,18 +305,17 @@ impl FlvDemux {
             }
             // TODO: pull mode
-            if false
-                && query.has_scheduling_mode_with_flags(
-                    gst::PadMode::Pull,
-                    gst::SchedulingFlags::SEEKABLE,
-                )
-            {
-                gst_debug!(CAT, obj: pad, "Activating in Pull mode");
-                gst::PadMode::Pull
-            } else {
-                gst_debug!(CAT, obj: pad, "Activating in Push mode");
-                gst::PadMode::Push
-            }
+            // if query.has_scheduling_mode_with_flags(
+            //     gst::PadMode::Pull,
+            //     gst::SchedulingFlags::SEEKABLE,
+            // )
+            // {
+            //     gst_debug!(CAT, obj: pad, "Activating in Pull mode");
+            //     gst::PadMode::Pull
+            // } else {
+            gst_debug!(CAT, obj: pad, "Activating in Push mode");
+            gst::PadMode::Push
+            // }
         };
         pad.activate_mode(mode, true)?;
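Note: the pull-mode branch is commented out rather than deleted, most likely to silence clippy's complaint that a condition starting with a literal `false` makes the whole `&&` expression, and therefore the first branch, dead code. A hypothetical standalone sketch of the pattern clippy objects to (names invented for illustration):

fn choose_mode(peer_is_seekable: bool) -> &'static str {
    // clippy: `false && ...` always evaluates to false, so this branch is dead code
    if false && peer_is_seekable {
        "pull"
    } else {
        "push"
    }
}

fn main() {
    println!("{}", choose_mode(true)); // always prints "push"
}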


@@ -290,10 +290,9 @@ impl S3Sink {
         s3utils::wait(&self.canceller, &self.runtime, complete_req_future)
             .map_err(|err| {
-                err.unwrap_or(gst_error_msg!(
-                    gst::LibraryError::Failed,
-                    ["Interrupted during stop"]
-                ))
+                err.unwrap_or_else(|| {
+                    gst_error_msg!(gst::LibraryError::Failed, ["Interrupted during stop"])
+                })
             })
             .map(|_| ())
     }
@@ -313,16 +312,17 @@ impl S3Sink {
         });
         let response = s3utils::wait(&self.canceller, &self.runtime, create_multipart_req_future)
             .map_err(|err| {
-                err.unwrap_or(gst_error_msg!(
-                    gst::LibraryError::Failed,
-                    ["Interrupted during start"]
-                ))
+                err.unwrap_or_else(|| {
+                    gst_error_msg!(gst::LibraryError::Failed, ["Interrupted during start"])
+                })
             })?;
-        let upload_id = response.upload_id.ok_or(gst_error_msg!(
-            gst::ResourceError::Failed,
-            ["Failed to get multipart upload ID"]
-        ))?;
+        let upload_id = response.upload_id.ok_or_else(|| {
+            gst_error_msg!(
+                gst::ResourceError::Failed,
+                ["Failed to get multipart upload ID"]
+            )
+        })?;
         Ok(Started::new(
             Vec::with_capacity(self.settings.lock().unwrap().buffer_size as usize),
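The `unwrap_or` → `unwrap_or_else` and `ok_or` → `ok_or_else` rewrites here (and in s3src below) address clippy's `or_fun_call` lint: the eager variants evaluate their argument even when it is never used, while the `_else` variants take a closure that only runs on the `None`/error path. A minimal sketch with illustrative names:

fn expensive_default() -> String {
    // stands in for building an error message or allocating a fallback
    "fallback".to_string()
}

fn main() {
    let value: Option<String> = Some("present".to_string());

    // Eager: the argument is evaluated even though `value` is `Some`
    // (clippy: `or_fun_call`).
    let _eager = value.clone().unwrap_or(expensive_default());

    // Lazy: the function only runs on the `None` path.
    let _lazy = value.clone().unwrap_or_else(expensive_default);

    // Same idea when converting an Option into a Result.
    let _res: Result<String, String> = value.ok_or_else(expensive_default);
}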


@@ -119,10 +119,9 @@ impl S3Src {
             }),
         )
         .map_err(|err| {
-            err.unwrap_or(gst_error_msg!(
-                gst::LibraryError::Failed,
-                ["Interrupted during start"]
-            ))
+            err.unwrap_or_else(|| {
+                gst_error_msg!(gst::LibraryError::Failed, ["Interrupted during start"])
+            })
         })?;
         if let Some(size) = output.content_length {
@@ -300,7 +299,7 @@ impl ObjectImpl for S3Src {
         let basesrc = obj.downcast_ref::<gst_base::BaseSrc>().unwrap();
         basesrc.set_format(gst::Format::Bytes);
         /* Set a larger default blocksize to make read more efficient */
-        basesrc.set_blocksize(262144);
+        basesrc.set_blocksize(256 * 1024);
     }
 }
@@ -368,7 +367,7 @@ impl BaseSrcImpl for S3Src {
         *state = StreamingState::Started {
             url: s3url,
             client: s3client,
-            size: size,
+            size,
         };
         Ok(())


@@ -85,10 +85,10 @@ pub fn parse_s3_url(url_str: &str) -> Result<GstS3Url, String> {
     }
     Ok(GstS3Url {
-        region: region,
-        bucket: bucket,
-        object: object,
-        version: version,
+        region,
+        bucket,
+        object,
+        version,
     })
 }
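This hunk and the `size: size` change above fix clippy's `redundant_field_names` lint: when a local variable has the same name as the struct field, the field-init shorthand is preferred. A small sketch with a made-up struct:

struct BucketUrl {
    region: String,
    bucket: String,
}

fn build_url(region: String, bucket: String) -> BucketUrl {
    // `region: region, bucket: bucket` would trigger `redundant_field_names`;
    // the shorthand below is equivalent.
    BucketUrl { region, bucket }
}

fn main() {
    let url = build_url("eu-west-1".to_string(), "my-bucket".to_string());
    println!("{}/{}", url.region, url.bucket);
}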


@@ -140,7 +140,7 @@ impl State {
         gst_debug!(CAT, obj: pad, "Returned pull size: {}", map.len());
-        let mut nonce = add_nonce(self.initial_nonce.clone().unwrap(), chunk_index);
+        let mut nonce = add_nonce(self.initial_nonce.unwrap(), chunk_index);
         let block_size = self.block_size.expect("Block size wasn't set") as usize + box_::MACBYTES;
         for subbuffer in map.chunks(block_size) {
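Dropping the `.clone()` before `.unwrap()` is presumably the fix for a clippy warning such as `clone_on_copy`: if the value inside the `Option` is `Copy` (as the nonce type appears to be), `unwrap()` can copy it directly and the clone does nothing. A standalone sketch with a stand-in `Copy` nonce type:

#[derive(Clone, Copy, Debug)]
struct Nonce([u8; 8]); // stand-in for the real, `Copy` nonce type

struct State {
    initial_nonce: Option<Nonce>,
}

impl State {
    fn nonce_for_chunk(&self) -> Nonce {
        // `self.initial_nonce.clone().unwrap()` compiles too, but the clone is
        // pointless for a `Copy` value, so clippy suggests dropping it.
        self.initial_nonce.unwrap()
    }
}

fn main() {
    let state = State {
        initial_nonce: Some(Nonce([0; 8])),
    };
    println!("{:?}", state.nonce_for_chunk());
}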


@@ -271,7 +271,6 @@ impl Encrypter {
             })?,
         );
-        drop(state);
         drop(state_guard);
         for buffer in buffers {
@@ -324,7 +323,6 @@ impl Encrypter {
         }
         // drop the lock before pushing into the pad
-        drop(state);
         drop(state_mutex);
         for buffer in buffers {
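The removed `drop(state)` calls were presumably no-ops flagged by clippy (e.g. `drop_ref`): dropping a reference borrowed out of the still-held guard releases nothing, so only the guard itself (`state_guard`/`state_mutex`) needs to be dropped before pushing buffers downstream. A rough sketch of the pattern, with invented names:

use std::sync::Mutex;

struct State {
    buffers: Vec<u32>,
}

fn take_buffers(mutex: &Mutex<State>) -> Vec<u32> {
    let mut guard = mutex.lock().unwrap();
    let state = &mut *guard;
    let buffers = std::mem::take(&mut state.buffers);

    // `drop(state)` here would only drop the `&mut State` reference and free
    // nothing (clippy: `drop_ref`); dropping the guard is what unlocks the mutex.
    drop(guard);

    buffers
}

fn main() {
    let mutex = Mutex::new(State { buffers: vec![1, 2, 3] });
    println!("{:?}", take_buffers(&mutex));
}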


@@ -223,7 +223,7 @@ impl AppSrc {
         } else {
             q.get_filter()
                 .map(|f| f.to_owned())
-                .unwrap_or_else(|| gst::Caps::new_any())
+                .unwrap_or_else(gst::Caps::new_any)
         };
         q.set_result(&caps);
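Replacing `.unwrap_or_else(|| gst::Caps::new_any())` with `.unwrap_or_else(gst::Caps::new_any)` follows clippy's `redundant_closure` lint: a closure that only forwards to a zero-argument function can be replaced by the function itself. The same change repeats in proxy, tcpclientsrc and udpsrc below. A minimal sketch:

fn default_name() -> String {
    "any".to_string()
}

fn main() {
    let filter: Option<String> = None;

    // Redundant closure around a zero-argument function (clippy: `redundant_closure`).
    let with_closure = filter.clone().unwrap_or_else(|| default_name());

    // Preferred: pass the function itself.
    let direct = filter.unwrap_or_else(default_name);

    println!("{} {}", with_closure, direct);
}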


@@ -858,7 +858,7 @@ impl ProxySrc {
         } else {
             q.get_filter()
                 .map(|f| f.to_owned())
-                .unwrap_or_else(|| gst::Caps::new_any())
+                .unwrap_or_else(gst::Caps::new_any)
         };
         q.set_result(&caps);


@@ -169,14 +169,10 @@ impl SocketRead for TcpClientReader {
             },
         };
         match socket.poll_read(buf) {
-            Ok(Async::Ready(result)) => {
-                return Ok(Async::Ready((result, None)));
-            }
-            Ok(Async::NotReady) => {
-                return Ok(Async::NotReady);
-            }
+            Ok(Async::Ready(result)) => Ok(Async::Ready((result, None))),
+            Ok(Async::NotReady) => Ok(Async::NotReady),
             Err(result) => return Err(result),
-        };
+        }
     }
 }
@@ -270,7 +266,7 @@ impl TcpClientSrc {
         } else {
             q.get_filter()
                 .map(|f| f.to_owned())
-                .unwrap_or_else(|| gst::Caps::new_any())
+                .unwrap_or_else(gst::Caps::new_any)
         };
         q.set_result(&caps);
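Both socket readers now use the `match` as the function's tail expression instead of `return`-ing from every arm, the shape clippy's `needless_return` lint pushes towards. A rough self-contained sketch, with stand-in types in place of the futures 0.1 `Poll`/`Async` machinery:

use std::io;

// Stand-in for the futures 0.1 `Async` enum used by the real readers.
enum Async<T> {
    Ready(T),
    NotReady,
}

// Pretend socket: "ready" when the buffer is non-empty.
fn poll_socket(buf: &mut [u8]) -> Result<Async<usize>, io::Error> {
    if buf.is_empty() {
        Ok(Async::NotReady)
    } else {
        Ok(Async::Ready(buf.len()))
    }
}

fn poll_read(buf: &mut [u8]) -> Result<Async<(usize, Option<u32>)>, io::Error> {
    // The match is the last expression of the function, so each arm produces
    // the return value directly; no `return` statements or trailing `;` needed.
    match poll_socket(buf) {
        Ok(Async::Ready(n)) => Ok(Async::Ready((n, None))),
        Ok(Async::NotReady) => Ok(Async::NotReady),
        Err(err) => Err(err),
    }
}

fn main() {
    let mut buf = [0u8; 16];
    match poll_read(&mut buf) {
        Ok(Async::Ready((n, _peer))) => println!("read {} bytes", n),
        Ok(Async::NotReady) => println!("not ready"),
        Err(err) => println!("error: {}", err),
    }
}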


@@ -303,14 +303,10 @@ impl SocketRead for UdpReader {
         buf: &mut [u8],
     ) -> Poll<(usize, Option<std::net::SocketAddr>), io::Error> {
         match self.socket.poll_recv_from(buf) {
-            Ok(Async::Ready(result)) => {
-                return Ok(Async::Ready((result.0, Some(result.1))));
-            }
-            Ok(Async::NotReady) => {
-                return Ok(Async::NotReady);
-            }
-            Err(result) => return Err(result),
-        };
+            Ok(Async::Ready(result)) => Ok(Async::Ready((result.0, Some(result.1)))),
+            Ok(Async::NotReady) => Ok(Async::NotReady),
+            Err(result) => Err(result),
+        }
     }
 }
@@ -404,7 +400,7 @@ impl UdpSrc {
         } else {
             q.get_filter()
                 .map(|f| f.to_owned())
-                .unwrap_or_else(|| gst::Caps::new_any())
+                .unwrap_or_else(gst::Caps::new_any)
         };
         q.set_result(&caps);


@@ -2,7 +2,7 @@ use chrono::TimeZone;
 use git2::{Commit, ObjectType, Repository};
 use std::path;
-pub fn get_info() -> () {
+pub fn get_info() {
     let mut commit_id = "UNKNOWN".to_string();
     let mut commit_date = chrono::Utc::now().format("%Y-%m-%d").to_string();
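Removing the explicit `-> ()` return type fixes clippy's `unused_unit` lint: a function that returns nothing should simply omit the return type. For example:

// Flagged by clippy: `pub fn get_info() -> () { ... }`
// Preferred:
pub fn get_info() {
    println!("collecting build info");
}

fn main() {
    get_info();
}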