hexsha
stringlengths 40
40
| size
int64 2
1.05M
| content
stringlengths 2
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
8f277d6f4089da23bd2156744f383239e1cabe1d | 17,213 | /*
* Copyright 2018 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ------------------------------------------------------------------------------
*/
use std::collections::HashMap;
use std::fs::File;
use std::io::{self, Read, Write};
use clap::ArgMatches;
use protobuf;
use protobuf::Message;
use serde_yaml;
use crate::proto::block::{Block, BlockHeader};
use crate::proto::transaction::TransactionHeader;
use crate::blockstore::Blockstore;
use crate::config;
use crate::database::error::DatabaseError;
use crate::database::lmdb;
use crate::err::CliError;
use crate::wrappers::Block as BlockWrapper;
const NULL_BLOCK_IDENTIFIER: &str = "0000000000000000";
/// Dispatches the `blockstore` subcommand named in `args` to its handler.
///
/// Unknown (or missing) subcommands print a usage hint and succeed, so the
/// process exit code stays zero.
pub fn run<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    match args.subcommand() {
        ("backup", Some(sub_args)) => run_backup_command(sub_args),
        ("restore", Some(sub_args)) => run_restore_command(sub_args),
        ("list", Some(sub_args)) => run_list_command(sub_args),
        ("show", Some(sub_args)) => run_show_command(sub_args),
        ("prune", Some(sub_args)) => run_prune_command(sub_args),
        ("export", Some(sub_args)) => run_export_command(sub_args),
        ("import", Some(sub_args)) => run_import_command(sub_args),
        ("stats", Some(sub_args)) => run_stats_command(sub_args),
        _ => {
            println!("Invalid subcommand; Pass --help for usage.");
            Ok(())
        }
    }
}
/// Writes the chain of blocks, starting at `--start` (or the chain head when
/// no start is given) and walking back to genesis, into the `--output` file
/// as length-delimited protobuf messages (see `backup_block`).
fn run_backup_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    let ctx = create_context()?;
    let blockstore = open_blockstore(&ctx)?;
    let filepath = args
        .value_of("output")
        .ok_or_else(|| CliError::ArgumentError("No output file".into()))?;
    let mut file = File::create(filepath)
        .map_err(|err| CliError::EnvironmentError(format!("Failed to create file: {}", err)))?;
    let mut current = match args.value_of("start") {
        None => blockstore.get_chain_head().map_err(|err| {
            CliError::EnvironmentError(format!("unable to read chain head: {}", err))
        }),
        Some(sig) => Ok(sig.into()),
    }?;
    // Walk backwards through the chain; the null id marks the genesis block's
    // (nonexistent) predecessor.
    while current != NULL_BLOCK_IDENTIFIER {
        // BUG FIX: `&current` had been mangled into the HTML entity text
        // `¤t`, which does not compile.
        let block = blockstore.get(&current).map_err(|err| {
            CliError::EnvironmentError(format!("Block in chain missing from blockstore: {}", err))
        })?;
        backup_block(&block, &mut file)?;
        let block_header: BlockHeader = protobuf::parse_from_bytes(&block.header)
            .map_err(|err| CliError::ParseError(format!("Unable to read block header: {}", err)))?;
        current = block_header.previous_block_id
    }
    Ok(())
}
/// Reads length-delimited blocks from the `--input` file (the format written
/// by the `backup` subcommand) and inserts each one into the blockstore.
fn run_restore_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    let ctx = create_context()?;
    let blockstore = open_blockstore(&ctx)?;
    let filepath = args
        .value_of("input")
        .ok_or_else(|| CliError::ArgumentError("No input file".into()))?;
    let mut file = File::open(filepath)
        .map_err(|err| CliError::EnvironmentError(format!("Failed to open file: {}", err)))?;
    let mut stream = protobuf::CodedInputStream::new(&mut file);
    // `restore_block` yields `None` once the stream is exhausted.
    loop {
        match restore_block(&mut stream)? {
            None => break,
            Some(block) => blockstore.put(&block).map_err(|err| {
                CliError::EnvironmentError(format!("Failed to put block: {}", err))
            })?,
        }
    }
    Ok(())
}
/// Prints a table of blocks, walking back from `--start` (or the chain head)
/// for at most `--count` blocks (default 100).
fn run_list_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    let ctx = create_context()?;
    let blockstore = open_blockstore(&ctx)?;
    // BUG FIX: the count argument was parsed with `.unwrap()`, so a
    // non-numeric `--count` crashed the CLI with a panic; report it as an
    // argument error instead.
    let mut count: u64 = args
        .value_of("count")
        .unwrap_or("100")
        .parse()
        .map_err(|err| CliError::ArgumentError(format!("Invalid count: {}", err)))?;
    // Get the chain head
    let head_sig = match args.value_of("start") {
        None => blockstore.get_chain_head().map_err(|err| {
            CliError::EnvironmentError(format!("failed to get chain head id: {}", err))
        }),
        Some(sig) => Ok(sig.into()),
    }?;
    // Walk back from the chain head
    let mut block_id = head_sig;
    print_block_store_list_header();
    while block_id != NULL_BLOCK_IDENTIFIER && count > 0 {
        let block = blockstore.get(&block_id).map_err(|err| {
            CliError::EnvironmentError(format!("failed to read block {}: {}", block_id, err))
        })?;
        let block_header: BlockHeader =
            protobuf::parse_from_bytes(&block.header).map_err(|err| {
                CliError::ParseError(format!(
                    "failed to parse header for block {}: {}",
                    block_id, err
                ))
            })?;
        let batches = block.batches.len();
        let txns = block
            .batches
            .iter()
            .fold(0, |acc, batch| acc + batch.transactions.len());
        print_block_store_list_row(
            block_header.block_num,
            &block.header_signature,
            batches,
            txns,
            &block_header.signer_public_key,
        );
        block_id = block_header.previous_block_id;
        count -= 1;
    }
    Ok(())
}
/// Prints the column headings for the `list` subcommand's table output.
fn print_block_store_list_header() {
    let (num, id, bats, txns) = ("NUM", "BLOCK_ID", "BATS", "TXNS");
    println!("{:<5} {:<128} {:<5} {:<5} SIGNER", num, id, bats, txns,);
}
/// Prints one row of the `list` subcommand's table output.
///
/// The signer key is abbreviated to its first six bytes. BUG FIX: the
/// original used `&signer[..6]`, which panics when the signer string is
/// shorter than six bytes (or slices off a UTF-8 char boundary); a short
/// signer is now printed in full instead.
fn print_block_store_list_row(
    block_num: u64,
    block_id: &str,
    batches: usize,
    txns: usize,
    signer: &str,
) {
    let abbreviated = signer.get(..6).unwrap_or(signer);
    println!(
        "{:<5} {:<128} {:<5} {:<5} {}...",
        block_num, block_id, batches, txns, abbreviated
    );
}
/// Looks up one block — by block id, batch id, transaction id, or height —
/// and prints it to stdout serialized as YAML.
fn run_show_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    let ctx = create_context()?;
    let blockstore = open_blockstore(&ctx)?;
    // Identifier flags are checked in a fixed priority order; exactly one
    // lookup is performed.
    let lookup_result = if args.is_present("block") {
        let block_id = args
            .value_of("block")
            .ok_or_else(|| CliError::ArgumentError("No block".into()))?;
        blockstore.get(block_id)
    } else if args.is_present("batch") {
        let batch_id = args
            .value_of("batch")
            .ok_or_else(|| CliError::ArgumentError("No batch".into()))?;
        blockstore.get_by_batch(batch_id)
    } else if args.is_present("transaction") {
        let txn_id = args
            .value_of("transaction")
            .ok_or_else(|| CliError::ArgumentError("No transaction".into()))?;
        blockstore.get_by_transaction(txn_id)
    } else if args.is_present("blocknum") {
        let blocknum = args
            .value_of("blocknum")
            .ok_or_else(|| CliError::ArgumentError("No block num".into()))?;
        let height: u64 = blocknum
            .parse()
            .map_err(|err| CliError::ArgumentError(format!("Invalid block num: {}", err)))?;
        blockstore.get_by_height(height)
    } else {
        return Err(CliError::ArgumentError("No identifier specified".into()));
    };
    let block = lookup_result
        .map_err(|err| CliError::ArgumentError(format!("Error getting block: {}", err)))?;
    let block_wrapper = BlockWrapper::try_from(block).map_err(|err| {
        CliError::EnvironmentError(format!("failed to create block wrapper: {}", err))
    })?;
    let block_yaml = serde_yaml::to_string(&block_wrapper).map_err(|err| {
        CliError::EnvironmentError(format!("failed to serialize block wrapper: {}", err))
    })?;
    println!("{}", block_yaml);
    Ok(())
}
/// Deletes blocks from the chain head back to — and including — the block
/// given by `--block`.
fn run_prune_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    let ctx = create_context()?;
    let blockstore = open_blockstore(&ctx)?;
    let block_id = args
        .value_of("block")
        .ok_or_else(|| CliError::ArgumentError("No block id".into()))?;
    // Verify the target block exists before deleting anything.
    blockstore
        .get(block_id)
        .map_err(|_| CliError::ArgumentError(format!("Block not found: {}", block_id)))?;
    // Get the chain head
    let chain_head = blockstore.get_chain_head().map_err(|err| {
        CliError::EnvironmentError(format!("failed to get chain head id: {}", err))
    })?;
    let mut current = blockstore.get(&chain_head).map_err(|err| {
        CliError::EnvironmentError(format!(
            "failed to get chain head ({}): {}",
            chain_head, err
        ))
    })?;
    // Walk from the head toward the target, deleting each block as we go.
    loop {
        // BUG FIX: `&current.header_signature` and `&current.header` below
        // had been mangled into the HTML entity text `¤t.…`, which does
        // not compile.
        blockstore
            .delete(&current.header_signature)
            .map_err(|err| {
                CliError::EnvironmentError(format!(
                    "failed to delete block {}: {}",
                    current.header_signature, err
                ))
            })?;
        if current.header_signature == block_id {
            break;
        }
        let header: BlockHeader = protobuf::parse_from_bytes(&current.header).map_err(|err| {
            CliError::ParseError(format!(
                "failed to parse block_header for block {}: {}",
                current.header_signature, err
            ))
        })?;
        current = blockstore.get(&header.previous_block_id).map_err(|err| {
            CliError::EnvironmentError(format!(
                "failed to read block {}: {}",
                header.previous_block_id, err
            ))
        })?;
    }
    Ok(())
}
/// Serializes the block identified by `--block` either to the `--output`
/// file or, when no output path is given, to stdout.
fn run_export_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    let ctx = create_context()?;
    let blockstore = open_blockstore(&ctx)?;
    let block_id = args
        .value_of("block")
        .ok_or_else(|| CliError::ArgumentError("No block id".into()))?;
    let block = blockstore
        .get(block_id)
        .map_err(|_| CliError::ArgumentError(format!("Block not found: {}", block_id)))?;
    if let Some(filepath) = args.value_of("output") {
        let mut file = File::create(filepath).map_err(|err| {
            CliError::EnvironmentError(format!("Failed to create file: {}", err))
        })?;
        block.write_to_writer(&mut file).map_err(|err| {
            CliError::EnvironmentError(format!(
                "failed to write {} to {}: {}",
                block_id, filepath, err
            ))
        })
    } else {
        let stdout = io::stdout();
        let mut handle = stdout.lock();
        block.write_to_writer(&mut handle).map_err(|err| {
            CliError::EnvironmentError(format!(
                "failed to write block {} to stdout: {}",
                block_id, err
            ))
        })
    }
}
/// Reads one serialized block from `--blockfile` and adds it to the
/// blockstore, requiring it to extend the current chain head (unless the
/// store is empty).
fn run_import_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
    let ctx = create_context()?;
    let blockstore = open_blockstore(&ctx)?;
    let filepath = args
        .value_of("blockfile")
        .ok_or_else(|| CliError::ArgumentError("No file".into()))?;
    let mut file = File::open(filepath)
        .map_err(|err| CliError::EnvironmentError(format!("Failed to open file: {}", err)))?;
    let mut packed = Vec::new();
    file.read_to_end(&mut packed)
        .map_err(|err| CliError::EnvironmentError(format!("Failed to read file: {}", err)))?;
    let block: Block = protobuf::parse_from_bytes(&packed)
        .map_err(|err| CliError::ParseError(format!("{}", err)))?;
    let block_header: BlockHeader = protobuf::parse_from_bytes(&block.header)
        .map_err(|err| CliError::ParseError(format!("{}", err)))?;
    let block_id = block.header_signature.clone();
    // Ensure this block is an immediate child of the current chain head
    match blockstore.get_chain_head() {
        Ok(chain_head) if block_header.previous_block_id != chain_head => {
            return Err(CliError::ArgumentError(format!(
                "New block must be an immediate child of the current chain head: {}",
                chain_head
            )));
        }
        // An empty blockstore has no chain head; any block may start the
        // chain, and a matching parent needs no further checks.
        Ok(_) | Err(DatabaseError::NotFoundError(_)) => (),
        Err(err) => {
            return Err(CliError::EnvironmentError(format!(
                "failed to read chain head id: {}",
                err
            )));
        }
    }
    blockstore.put(&block).map_err(|err| {
        CliError::ArgumentError(format!("Failed to put block into database: {}", err))
    })?;
    println!("Block {} added", block_id);
    Ok(())
}
fn run_stats_command<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {
let ctx = create_context()?;
let blockstore = open_blockstore(&ctx)?;
let block_count = blockstore.get_current_height().map_err(|err| {
CliError::EnvironmentError(format!("failed to read block count: {}", err))
})?;
let batch_count = blockstore.get_batch_count().map_err(|err| {
CliError::EnvironmentError(format!("failed to read batch count: {}", err))
})?;
let txn_count = blockstore.get_transaction_count().map_err(|err| {
CliError::EnvironmentError(format!("failed to read transaction count: {}", err))
})?;
if args.is_present("extended") {
let mut txn_family_counts = HashMap::new();
let chain_head = blockstore.get_chain_head().map_err(|err| {
CliError::EnvironmentError(format!("failed to get chain head id: {}", err))
})?;
let mut block = blockstore.get(&chain_head).map_err(|err| {
CliError::EnvironmentError(format!("failed to read chain head: {}", err))
})?;
loop {
for batch in &block.batches {
for txn in &batch.transactions {
let txn_header: TransactionHeader = protobuf::parse_from_bytes(&txn.header)
.map_err(|err| {
CliError::ParseError(format!(
"failed to parse header for transaction {}: {}",
txn.header_signature, err
))
})?;
let count = txn_family_counts.entry(txn_header.family_name).or_insert(0);
*count += 1;
}
}
let header: BlockHeader = protobuf::parse_from_bytes(&block.header).map_err(|err| {
CliError::ParseError(format!(
"failed to parse header for block {}: {}",
block.header_signature, err
))
})?;
if header.previous_block_id == NULL_BLOCK_IDENTIFIER {
break;
}
block = blockstore.get(&header.previous_block_id).map_err(|err| {
CliError::EnvironmentError(format!(
"failed to read block {}: {}",
header.previous_block_id, err
))
})?;
}
println!("Blocks: {}", block_count);
println!("Batches: {}", batch_count);
println!("Transactions: {}", txn_count);
for (family, count) in &txn_family_counts {
println!(" {}: {}", family, count);
}
} else {
println!("Blocks: {}", block_count);
println!("Batches: {}", batch_count);
println!("Transactions: {}", txn_count);
}
Ok(())
}
/// Builds an LMDB context pointing at the configured blockstore file.
fn create_context() -> Result<lmdb::LmdbContext, CliError> {
    let path_config = config::get_path_config();
    let blockstore_path = path_config
        .data_dir
        .join(config::get_blockstore_filename());
    // 3 — presumably the number of index databases opened in
    // `open_blockstore`; confirm against `LmdbContext::new`'s signature.
    lmdb::LmdbContext::new(&blockstore_path, 3, None).map_err(|err| {
        CliError::EnvironmentError(format!("failed to create block store context: {}", err))
    })
}
/// Opens the blockstore database (with its batch, transaction, and
/// block-number index sub-databases) inside `ctx`.
fn open_blockstore(ctx: &lmdb::LmdbContext) -> Result<Blockstore, CliError> {
    let indexes = ["index_batch", "index_transaction", "index_block_num"];
    let blockstore_db = lmdb::LmdbDatabase::new(ctx, &indexes).map_err(|err| {
        CliError::EnvironmentError(format!("failed to open block store DB: {}", err))
    })?;
    Ok(Blockstore::new(blockstore_db))
}
/// Serializes `block` to `writer` with a length-delimited prefix, so that a
/// sequence of blocks can later be read back one at a time by
/// `restore_block`.
fn backup_block<W: Write>(block: &Block, writer: &mut W) -> Result<(), CliError> {
    let written = block.write_length_delimited_to_writer(writer);
    written.map_err(|err| CliError::EnvironmentError(format!("{}", err)))
}
/// Reads the next length-delimited block from `source`.
///
/// Returns `Ok(None)` once the stream has reached end-of-file.
fn restore_block(source: &mut protobuf::CodedInputStream) -> Result<Option<Block>, CliError> {
    let at_end = source
        .eof()
        .map_err(|err| CliError::EnvironmentError(format!("Failed to check EOF: {}", err)))?;
    if at_end {
        Ok(None)
    } else {
        let block = source
            .read_message()
            .map_err(|err| CliError::EnvironmentError(format!("Failed to parse block: {}", err)))?;
        Ok(Some(block))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use protobuf::CodedInputStream;

    /// Round-trip check: a block written with `backup_block` can be read
    /// back with `restore_block`, and a second read on the exhausted
    /// stream yields `None`.
    #[test]
    fn backup_and_restore() {
        let mut buffer: Vec<u8> = vec![];
        let mut block = Block::new();
        block.set_header_signature("abc123".into());
        backup_block(&block, &mut buffer).unwrap();
        let mut is = CodedInputStream::from_bytes(&buffer);
        let restored_block = restore_block(&mut is).unwrap();
        assert_eq!(Some(block), restored_block);
        // The stream is now at EOF, so the next restore reports None.
        assert_eq!(None, restore_block(&mut is).unwrap());
    }
}
| 35.200409 | 99 | 0.568989 |
fbf295b55f5b1643d7befaf59a366d30432ba684 | 3,551 | use std::io;
use std::fmt;
use std::io::prelude::*;
use std::str::FromStr;
use std::collections::VecDeque;
// Pixel screen for the puzzle. The grid is stored row-major; `true` means
// the pixel is lit. `VecDeque` is used for both rows and the row container
// so rotations can pop from one end and push onto the other.
#[derive(Debug)]
struct Display {
    data: VecDeque<VecDeque<bool>>,
    height: usize, // number of rows in `data`
    width: usize,  // number of pixels per row
}
// One parsed screen instruction (see the `FromStr` impl for the text forms).
#[derive(Debug)]
enum Command {
    // Light every pixel in a width x height rectangle at the top-left.
    Rect(usize, usize),
    // Rotate row `y` right by `offset` pixels, wrapping around.
    RotateRow(usize, usize),
    // Rotate column `x` down by `offset` pixels, wrapping around.
    RotateCol(usize, usize),
}
impl Display {
fn new(w: usize, h: usize) -> Display {
let mut rows = VecDeque::new();
for _ in 0..h {
let mut row = VecDeque::new();
for _ in 0..w {
row.push_back(false);
}
rows.push_back(row)
}
Display {
data: rows,
height: h,
width: w,
}
}
fn command(&mut self, cmd: &str) {
let cmd: Command = cmd.parse().unwrap();
use Command::*;
match cmd {
Rect(x, y) => {
for y in 0..y {
for x in 0..x {
self.data[y][x] = true;
}
}
}
RotateRow(y, offset) => {
for _ in 0..offset {
let last = self.data[y].pop_back().unwrap();
self.data[y].push_front(last);
}
}
RotateCol(x, offset) => {
for _ in 0..offset {
let mut new_column: VecDeque<bool> =
(0..self.height).map(|y| self.data[y][x]).collect();
let last = new_column.pop_back().unwrap();
new_column.push_front(last);
for (y, &pixel) in new_column.iter().enumerate() {
self.data[y][x] = pixel;
}
}
}
}
}
fn lit_pixels(&self) -> usize {
self.data.iter().flat_map(|row| row.iter().filter(|&pixel| *pixel)).count()
}
}
impl fmt::Display for Display {
    /// Renders the grid one row per line: '#' for lit pixels, '.' for dark.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut rendered = Vec::with_capacity(self.height);
        for row in &self.data {
            let line: String = row
                .iter()
                .map(|&lit| if lit { '#' } else { '.' })
                .collect();
            rendered.push(line);
        }
        write!(f, "{}", rendered.join("\n"))
    }
}
impl FromStr for Command {
    type Err = &'static str;

    /// Parses the three puzzle instruction forms:
    ///
    /// * `rect AxB`
    /// * `rotate row y=A by B`
    /// * `rotate column x=A by B`
    ///
    /// BUG FIX: the original indexed the token list (`input[0]`, `input[1]`,
    /// ...) and `unwrap()`ed every parse, so any malformed or truncated line
    /// panicked instead of returning `Err` as the `FromStr` contract allows.
    /// All lookups and parses now propagate `Err("invalid command")`.
    fn from_str(input: &str) -> Result<Command, Self::Err> {
        const BAD: &str = "invalid command";
        let words: Vec<&str> = input.split_whitespace().collect();
        match words.first() {
            Some(&"rect") => {
                // Dimensions are written as "AxB".
                let mut dims = words.get(1).ok_or(BAD)?.split('x');
                let x = dims.next().ok_or(BAD)?.parse().map_err(|_| BAD)?;
                let y = dims.next().ok_or(BAD)?.parse().map_err(|_| BAD)?;
                Ok(Command::Rect(x, y))
            }
            Some(&"rotate") => {
                // The coordinate is the part after '=' in e.g. "y=3".
                let coord = words
                    .get(2)
                    .and_then(|w| w.split('=').last())
                    .ok_or(BAD)?
                    .parse()
                    .map_err(|_| BAD)?;
                let offset = words.get(4).ok_or(BAD)?.parse().map_err(|_| BAD)?;
                match words.get(1) {
                    Some(&"row") => Ok(Command::RotateRow(coord, offset)),
                    Some(&"column") => Ok(Command::RotateCol(coord, offset)),
                    _ => Err(BAD),
                }
            }
            _ => Err(BAD),
        }
    }
}
/// Reads one instruction per line from stdin, applies them all to a 50x6
/// screen, then prints the lit-pixel count (part 1) and the rendered
/// screen (part 2).
fn main() {
    let mut display = Display::new(50, 6);
    let stdin = io::stdin();
    for line in stdin.lock().lines() {
        let line = line.unwrap();
        display.command(&line);
    }
    println!("part1: {}", display.lit_pixels());
    println!("part2:\n{}", display);
}
| 27.742188 | 98 | 0.442411 |
380e2483c06dc4677a984de3cd52f22cee9dea31 | 38,611 | //! Expanded S-boxes generated using `gen_exp_sbox` function
/// Expanded S-box: four 256-entry byte-substitution tables, one per byte of
/// the 32-bit input.
type ExpSbox = [[u8; 256]; 4];
/// Unexpanded S-box: eight rows of sixteen 4-bit substitutions.
type SmallSbox = [[u8; 16]; 8];

/// Trait implemented for the GOST 28147-89 cipher S-boxes.
pub trait Sbox {
    /// Expanded S-box
    const EXP_SBOX: ExpSbox;
    /// Unexpanded S-box
    const SBOX: SmallSbox;
    /// S-Box name
    const NAME: &'static str;

    /// Generate expanded version of S-box.
    ///
    /// Each expanded entry combines a pair of 4-bit rows: the even row
    /// substitutes the low nibble and the odd row the high nibble.
    #[allow(clippy::needless_range_loop)]
    fn gen_exp_sbox() -> ExpSbox {
        let mut expanded = [[0u8; 256]; 4];
        for table in 0..4 {
            for lo in 0..16 {
                for hi in 0..16 {
                    let value: u8 =
                        Self::SBOX[2 * table][lo] + (Self::SBOX[2 * table + 1][hi] << 4);
                    expanded[table][lo + (hi << 4)] = value;
                }
            }
        }
        expanded
    }

    /// Apply S-box and return result.
    ///
    /// Byte `i` of `a` is replaced via lookup table `i`; the substituted
    /// bytes are reassembled in place.
    fn apply_sbox(a: u32) -> u32 {
        (0..4).fold(0u32, |acc, i| {
            let shift = 8 * i;
            let idx = ((a >> shift) & 0xff) as usize;
            acc + ((Self::EXP_SBOX[i][idx] as u32) << shift)
        })
    }

    /// Function `g` based on the S-box: add the round key, substitute, then
    /// rotate left by 11 bits.
    fn g(a: u32, k: u32) -> u32 {
        Self::apply_sbox(a.wrapping_add(k)).rotate_left(11)
    }
}
/// Marker type selecting the "Tc26" GOST 28147-89 S-box parameter set.
#[derive(Clone)]
pub enum Tc26 {}
impl Sbox for Tc26 {
    const NAME: &'static str = "Tc26";
    // Precomputed expansion of `SBOX` (the module doc notes these tables are
    // generated with `gen_exp_sbox`): four 256-entry byte-substitution
    // tables, one per input byte.
    const EXP_SBOX: ExpSbox = [
        [
            108, 100, 102, 98, 106, 101, 107, 105, 110, 104, 109, 103, 96, 99, 111, 97, 140, 132,
            134, 130, 138, 133, 139, 137, 142, 136, 141, 135, 128, 131, 143, 129, 44, 36, 38, 34,
            42, 37, 43, 41, 46, 40, 45, 39, 32, 35, 47, 33, 60, 52, 54, 50, 58, 53, 59, 57, 62, 56,
            61, 55, 48, 51, 63, 49, 156, 148, 150, 146, 154, 149, 155, 153, 158, 152, 157, 151,
            144, 147, 159, 145, 172, 164, 166, 162, 170, 165, 171, 169, 174, 168, 173, 167, 160,
            163, 175, 161, 92, 84, 86, 82, 90, 85, 91, 89, 94, 88, 93, 87, 80, 83, 95, 81, 204,
            196, 198, 194, 202, 197, 203, 201, 206, 200, 205, 199, 192, 195, 207, 193, 28, 20, 22,
            18, 26, 21, 27, 25, 30, 24, 29, 23, 16, 19, 31, 17, 236, 228, 230, 226, 234, 229, 235,
            233, 238, 232, 237, 231, 224, 227, 239, 225, 76, 68, 70, 66, 74, 69, 75, 73, 78, 72,
            77, 71, 64, 67, 79, 65, 124, 116, 118, 114, 122, 117, 123, 121, 126, 120, 125, 119,
            112, 115, 127, 113, 188, 180, 182, 178, 186, 181, 187, 185, 190, 184, 189, 183, 176,
            179, 191, 177, 220, 212, 214, 210, 218, 213, 219, 217, 222, 216, 221, 215, 208, 211,
            223, 209, 12, 4, 6, 2, 10, 5, 11, 9, 14, 8, 13, 7, 0, 3, 15, 1, 252, 244, 246, 242,
            250, 245, 251, 249, 254, 248, 253, 247, 240, 243, 255, 241,
        ],
        [
            203, 195, 197, 200, 194, 207, 202, 205, 206, 193, 199, 196, 204, 201, 198, 192, 139,
            131, 133, 136, 130, 143, 138, 141, 142, 129, 135, 132, 140, 137, 134, 128, 43, 35, 37,
            40, 34, 47, 42, 45, 46, 33, 39, 36, 44, 41, 38, 32, 27, 19, 21, 24, 18, 31, 26, 29, 30,
            17, 23, 20, 28, 25, 22, 16, 219, 211, 213, 216, 210, 223, 218, 221, 222, 209, 215, 212,
            220, 217, 214, 208, 75, 67, 69, 72, 66, 79, 74, 77, 78, 65, 71, 68, 76, 73, 70, 64,
            251, 243, 245, 248, 242, 255, 250, 253, 254, 241, 247, 244, 252, 249, 246, 240, 107,
            99, 101, 104, 98, 111, 106, 109, 110, 97, 103, 100, 108, 105, 102, 96, 123, 115, 117,
            120, 114, 127, 122, 125, 126, 113, 119, 116, 124, 121, 118, 112, 11, 3, 5, 8, 2, 15,
            10, 13, 14, 1, 7, 4, 12, 9, 6, 0, 171, 163, 165, 168, 162, 175, 170, 173, 174, 161,
            167, 164, 172, 169, 166, 160, 91, 83, 85, 88, 82, 95, 90, 93, 94, 81, 87, 84, 92, 89,
            86, 80, 59, 51, 53, 56, 50, 63, 58, 61, 62, 49, 55, 52, 60, 57, 54, 48, 235, 227, 229,
            232, 226, 239, 234, 237, 238, 225, 231, 228, 236, 233, 230, 224, 155, 147, 149, 152,
            146, 159, 154, 157, 158, 145, 151, 148, 156, 153, 150, 144, 187, 179, 181, 184, 178,
            191, 186, 189, 190, 177, 183, 180, 188, 185, 182, 176,
        ],
        [
            87, 95, 85, 90, 88, 81, 86, 93, 80, 89, 83, 94, 91, 84, 82, 92, 215, 223, 213, 218,
            216, 209, 214, 221, 208, 217, 211, 222, 219, 212, 210, 220, 247, 255, 245, 250, 248,
            241, 246, 253, 240, 249, 243, 254, 251, 244, 242, 252, 103, 111, 101, 106, 104, 97,
            102, 109, 96, 105, 99, 110, 107, 100, 98, 108, 151, 159, 149, 154, 152, 145, 150, 157,
            144, 153, 147, 158, 155, 148, 146, 156, 39, 47, 37, 42, 40, 33, 38, 45, 32, 41, 35, 46,
            43, 36, 34, 44, 199, 207, 197, 202, 200, 193, 198, 205, 192, 201, 195, 206, 203, 196,
            194, 204, 167, 175, 165, 170, 168, 161, 166, 173, 160, 169, 163, 174, 171, 164, 162,
            172, 183, 191, 181, 186, 184, 177, 182, 189, 176, 185, 179, 190, 187, 180, 178, 188,
            119, 127, 117, 122, 120, 113, 118, 125, 112, 121, 115, 126, 123, 116, 114, 124, 135,
            143, 133, 138, 136, 129, 134, 141, 128, 137, 131, 142, 139, 132, 130, 140, 23, 31, 21,
            26, 24, 17, 22, 29, 16, 25, 19, 30, 27, 20, 18, 28, 71, 79, 69, 74, 72, 65, 70, 77, 64,
            73, 67, 78, 75, 68, 66, 76, 55, 63, 53, 58, 56, 49, 54, 61, 48, 57, 51, 62, 59, 52, 50,
            60, 231, 239, 229, 234, 232, 225, 230, 237, 224, 233, 227, 238, 235, 228, 226, 236, 7,
            15, 5, 10, 8, 1, 6, 13, 0, 9, 3, 14, 11, 4, 2, 12,
        ],
        [
            24, 30, 18, 21, 22, 25, 17, 28, 31, 20, 27, 16, 29, 26, 19, 23, 120, 126, 114, 117,
            118, 121, 113, 124, 127, 116, 123, 112, 125, 122, 115, 119, 232, 238, 226, 229, 230,
            233, 225, 236, 239, 228, 235, 224, 237, 234, 227, 231, 216, 222, 210, 213, 214, 217,
            209, 220, 223, 212, 219, 208, 221, 218, 211, 215, 8, 14, 2, 5, 6, 9, 1, 12, 15, 4, 11,
            0, 13, 10, 3, 7, 88, 94, 82, 85, 86, 89, 81, 92, 95, 84, 91, 80, 93, 90, 83, 87, 136,
            142, 130, 133, 134, 137, 129, 140, 143, 132, 139, 128, 141, 138, 131, 135, 56, 62, 50,
            53, 54, 57, 49, 60, 63, 52, 59, 48, 61, 58, 51, 55, 72, 78, 66, 69, 70, 73, 65, 76, 79,
            68, 75, 64, 77, 74, 67, 71, 248, 254, 242, 245, 246, 249, 241, 252, 255, 244, 251, 240,
            253, 250, 243, 247, 168, 174, 162, 165, 166, 169, 161, 172, 175, 164, 171, 160, 173,
            170, 163, 167, 104, 110, 98, 101, 102, 105, 97, 108, 111, 100, 107, 96, 109, 106, 99,
            103, 152, 158, 146, 149, 150, 153, 145, 156, 159, 148, 155, 144, 157, 154, 147, 151,
            200, 206, 194, 197, 198, 201, 193, 204, 207, 196, 203, 192, 205, 202, 195, 199, 184,
            190, 178, 181, 182, 185, 177, 188, 191, 180, 187, 176, 189, 186, 179, 183, 40, 46, 34,
            37, 38, 41, 33, 44, 47, 36, 43, 32, 45, 42, 35, 39,
        ],
    ];
    // The raw 4-bit S-box: eight rows of sixteen nibble substitutions.
    const SBOX: SmallSbox = [
        [12, 4, 6, 2, 10, 5, 11, 9, 14, 8, 13, 7, 0, 3, 15, 1],
        [6, 8, 2, 3, 9, 10, 5, 12, 1, 14, 4, 7, 11, 13, 0, 15],
        [11, 3, 5, 8, 2, 15, 10, 13, 14, 1, 7, 4, 12, 9, 6, 0],
        [12, 8, 2, 1, 13, 4, 15, 6, 7, 0, 10, 5, 3, 14, 9, 11],
        [7, 15, 5, 10, 8, 1, 6, 13, 0, 9, 3, 14, 11, 4, 2, 12],
        [5, 13, 15, 6, 9, 2, 12, 10, 11, 7, 8, 1, 4, 3, 14, 0],
        [8, 14, 2, 5, 6, 9, 1, 12, 15, 4, 11, 0, 13, 10, 3, 7],
        [1, 7, 14, 13, 0, 5, 8, 3, 4, 15, 10, 6, 9, 12, 11, 2],
    ];
}
/// Marker type selecting the "TestSbox" GOST 28147-89 S-box parameter set.
#[derive(Clone)]
pub enum TestSbox {}
impl Sbox for TestSbox {
    const NAME: &'static str = "TestSbox";
    // Precomputed expansion of `SBOX` (the module doc notes these tables are
    // generated with `gen_exp_sbox`): four 256-entry byte-substitution
    // tables, one per input byte.
    const EXP_SBOX: ExpSbox = [
        [
            228, 234, 233, 226, 237, 232, 224, 238, 230, 235, 225, 236, 231, 239, 229, 227, 180,
            186, 185, 178, 189, 184, 176, 190, 182, 187, 177, 188, 183, 191, 181, 179, 68, 74, 73,
            66, 77, 72, 64, 78, 70, 75, 65, 76, 71, 79, 69, 67, 196, 202, 201, 194, 205, 200, 192,
            206, 198, 203, 193, 204, 199, 207, 197, 195, 100, 106, 105, 98, 109, 104, 96, 110, 102,
            107, 97, 108, 103, 111, 101, 99, 212, 218, 217, 210, 221, 216, 208, 222, 214, 219, 209,
            220, 215, 223, 213, 211, 244, 250, 249, 242, 253, 248, 240, 254, 246, 251, 241, 252,
            247, 255, 245, 243, 164, 170, 169, 162, 173, 168, 160, 174, 166, 171, 161, 172, 167,
            175, 165, 163, 36, 42, 41, 34, 45, 40, 32, 46, 38, 43, 33, 44, 39, 47, 37, 35, 52, 58,
            57, 50, 61, 56, 48, 62, 54, 59, 49, 60, 55, 63, 53, 51, 132, 138, 137, 130, 141, 136,
            128, 142, 134, 139, 129, 140, 135, 143, 133, 131, 20, 26, 25, 18, 29, 24, 16, 30, 22,
            27, 17, 28, 23, 31, 21, 19, 4, 10, 9, 2, 13, 8, 0, 14, 6, 11, 1, 12, 7, 15, 5, 3, 116,
            122, 121, 114, 125, 120, 112, 126, 118, 123, 113, 124, 119, 127, 117, 115, 84, 90, 89,
            82, 93, 88, 80, 94, 86, 91, 81, 92, 87, 95, 85, 83, 148, 154, 153, 146, 157, 152, 144,
            158, 150, 155, 145, 156, 151, 159, 149, 147,
        ],
        [
            117, 120, 113, 125, 122, 115, 116, 114, 126, 127, 124, 119, 118, 112, 121, 123, 213,
            216, 209, 221, 218, 211, 212, 210, 222, 223, 220, 215, 214, 208, 217, 219, 165, 168,
            161, 173, 170, 163, 164, 162, 174, 175, 172, 167, 166, 160, 169, 171, 21, 24, 17, 29,
            26, 19, 20, 18, 30, 31, 28, 23, 22, 16, 25, 27, 5, 8, 1, 13, 10, 3, 4, 2, 14, 15, 12,
            7, 6, 0, 9, 11, 133, 136, 129, 141, 138, 131, 132, 130, 142, 143, 140, 135, 134, 128,
            137, 139, 149, 152, 145, 157, 154, 147, 148, 146, 158, 159, 156, 151, 150, 144, 153,
            155, 245, 248, 241, 253, 250, 243, 244, 242, 254, 255, 252, 247, 246, 240, 249, 251,
            229, 232, 225, 237, 234, 227, 228, 226, 238, 239, 236, 231, 230, 224, 233, 235, 69, 72,
            65, 77, 74, 67, 68, 66, 78, 79, 76, 71, 70, 64, 73, 75, 101, 104, 97, 109, 106, 99,
            100, 98, 110, 111, 108, 103, 102, 96, 105, 107, 197, 200, 193, 205, 202, 195, 196, 194,
            206, 207, 204, 199, 198, 192, 201, 203, 181, 184, 177, 189, 186, 179, 180, 178, 190,
            191, 188, 183, 182, 176, 185, 187, 37, 40, 33, 45, 42, 35, 36, 34, 46, 47, 44, 39, 38,
            32, 41, 43, 85, 88, 81, 93, 90, 83, 84, 82, 94, 95, 92, 87, 86, 80, 89, 91, 53, 56, 49,
            61, 58, 51, 52, 50, 62, 63, 60, 55, 54, 48, 57, 59,
        ],
        [
            70, 76, 71, 65, 69, 79, 77, 72, 68, 74, 73, 78, 64, 67, 75, 66, 182, 188, 183, 177,
            181, 191, 189, 184, 180, 186, 185, 190, 176, 179, 187, 178, 166, 172, 167, 161, 165,
            175, 173, 168, 164, 170, 169, 174, 160, 163, 171, 162, 6, 12, 7, 1, 5, 15, 13, 8, 4,
            10, 9, 14, 0, 3, 11, 2, 118, 124, 119, 113, 117, 127, 125, 120, 116, 122, 121, 126,
            112, 115, 123, 114, 38, 44, 39, 33, 37, 47, 45, 40, 36, 42, 41, 46, 32, 35, 43, 34, 22,
            28, 23, 17, 21, 31, 29, 24, 20, 26, 25, 30, 16, 19, 27, 18, 214, 220, 215, 209, 213,
            223, 221, 216, 212, 218, 217, 222, 208, 211, 219, 210, 54, 60, 55, 49, 53, 63, 61, 56,
            52, 58, 57, 62, 48, 51, 59, 50, 102, 108, 103, 97, 101, 111, 109, 104, 100, 106, 105,
            110, 96, 99, 107, 98, 134, 140, 135, 129, 133, 143, 141, 136, 132, 138, 137, 142, 128,
            131, 139, 130, 86, 92, 87, 81, 85, 95, 93, 88, 84, 90, 89, 94, 80, 83, 91, 82, 150,
            156, 151, 145, 149, 159, 157, 152, 148, 154, 153, 158, 144, 147, 155, 146, 198, 204,
            199, 193, 197, 207, 205, 200, 196, 202, 201, 206, 192, 195, 203, 194, 246, 252, 247,
            241, 245, 255, 253, 248, 244, 250, 249, 254, 240, 243, 251, 242, 230, 236, 231, 225,
            229, 239, 237, 232, 228, 234, 233, 238, 224, 227, 235, 226,
        ],
        [
            29, 27, 20, 17, 19, 31, 21, 25, 16, 26, 30, 23, 22, 24, 18, 28, 253, 251, 244, 241,
            243, 255, 245, 249, 240, 250, 254, 247, 246, 248, 242, 252, 221, 219, 212, 209, 211,
            223, 213, 217, 208, 218, 222, 215, 214, 216, 210, 220, 13, 11, 4, 1, 3, 15, 5, 9, 0,
            10, 14, 7, 6, 8, 2, 12, 93, 91, 84, 81, 83, 95, 85, 89, 80, 90, 94, 87, 86, 88, 82, 92,
            125, 123, 116, 113, 115, 127, 117, 121, 112, 122, 126, 119, 118, 120, 114, 124, 173,
            171, 164, 161, 163, 175, 165, 169, 160, 170, 174, 167, 166, 168, 162, 172, 77, 75, 68,
            65, 67, 79, 69, 73, 64, 74, 78, 71, 70, 72, 66, 76, 157, 155, 148, 145, 147, 159, 149,
            153, 144, 154, 158, 151, 150, 152, 146, 156, 45, 43, 36, 33, 35, 47, 37, 41, 32, 42,
            46, 39, 38, 40, 34, 44, 61, 59, 52, 49, 51, 63, 53, 57, 48, 58, 62, 55, 54, 56, 50, 60,
            237, 235, 228, 225, 227, 239, 229, 233, 224, 234, 238, 231, 230, 232, 226, 236, 109,
            107, 100, 97, 99, 111, 101, 105, 96, 106, 110, 103, 102, 104, 98, 108, 189, 187, 180,
            177, 179, 191, 181, 185, 176, 186, 190, 183, 182, 184, 178, 188, 141, 139, 132, 129,
            131, 143, 133, 137, 128, 138, 142, 135, 134, 136, 130, 140, 205, 203, 196, 193, 195,
            207, 197, 201, 192, 202, 206, 199, 198, 200, 194, 204,
        ],
    ];
    // The raw 4-bit S-box: eight rows of sixteen nibble substitutions.
    const SBOX: SmallSbox = [
        [4, 10, 9, 2, 13, 8, 0, 14, 6, 11, 1, 12, 7, 15, 5, 3],
        [14, 11, 4, 12, 6, 13, 15, 10, 2, 3, 8, 1, 0, 7, 5, 9],
        [5, 8, 1, 13, 10, 3, 4, 2, 14, 15, 12, 7, 6, 0, 9, 11],
        [7, 13, 10, 1, 0, 8, 9, 15, 14, 4, 6, 12, 11, 2, 5, 3],
        [6, 12, 7, 1, 5, 15, 13, 8, 4, 10, 9, 14, 0, 3, 11, 2],
        [4, 11, 10, 0, 7, 2, 1, 13, 3, 6, 8, 5, 9, 12, 15, 14],
        [13, 11, 4, 1, 3, 15, 5, 9, 0, 10, 14, 7, 6, 8, 2, 12],
        [1, 15, 13, 0, 5, 7, 10, 4, 9, 2, 3, 14, 6, 11, 8, 12],
    ];
}
/// Marker type selecting the "CryptoProA" GOST 28147-89 S-box parameter set.
#[derive(Clone)]
pub enum CryptoProA {}
impl Sbox for CryptoProA {
    const NAME: &'static str = "CryptoProA";
    // Precomputed expansion of `SBOX` (the module doc notes these tables are
    // generated with `gen_exp_sbox`): four 256-entry byte-substitution
    // tables, one per input byte.
    const EXP_SBOX: ExpSbox = [
        [
            57, 54, 51, 50, 56, 59, 49, 55, 58, 52, 62, 63, 60, 48, 61, 53, 121, 118, 115, 114,
            120, 123, 113, 119, 122, 116, 126, 127, 124, 112, 125, 117, 233, 230, 227, 226, 232,
            235, 225, 231, 234, 228, 238, 239, 236, 224, 237, 229, 153, 150, 147, 146, 152, 155,
            145, 151, 154, 148, 158, 159, 156, 144, 157, 149, 137, 134, 131, 130, 136, 139, 129,
            135, 138, 132, 142, 143, 140, 128, 141, 133, 169, 166, 163, 162, 168, 171, 161, 167,
            170, 164, 174, 175, 172, 160, 173, 165, 249, 246, 243, 242, 248, 251, 241, 247, 250,
            244, 254, 255, 252, 240, 253, 245, 9, 6, 3, 2, 8, 11, 1, 7, 10, 4, 14, 15, 12, 0, 13,
            5, 89, 86, 83, 82, 88, 91, 81, 87, 90, 84, 94, 95, 92, 80, 93, 85, 41, 38, 35, 34, 40,
            43, 33, 39, 42, 36, 46, 47, 44, 32, 45, 37, 105, 102, 99, 98, 104, 107, 97, 103, 106,
            100, 110, 111, 108, 96, 109, 101, 201, 198, 195, 194, 200, 203, 193, 199, 202, 196,
            206, 207, 204, 192, 205, 197, 185, 182, 179, 178, 184, 187, 177, 183, 186, 180, 190,
            191, 188, 176, 189, 181, 73, 70, 67, 66, 72, 75, 65, 71, 74, 68, 78, 79, 76, 64, 77,
            69, 217, 214, 211, 210, 216, 219, 209, 215, 218, 212, 222, 223, 220, 208, 221, 213, 25,
            22, 19, 18, 24, 27, 17, 23, 26, 20, 30, 31, 28, 16, 29, 21,
        ],
        [
            238, 228, 230, 226, 235, 227, 237, 232, 236, 239, 229, 234, 224, 231, 225, 233, 126,
            116, 118, 114, 123, 115, 125, 120, 124, 127, 117, 122, 112, 119, 113, 121, 174, 164,
            166, 162, 171, 163, 173, 168, 172, 175, 165, 170, 160, 167, 161, 169, 206, 196, 198,
            194, 203, 195, 205, 200, 204, 207, 197, 202, 192, 199, 193, 201, 222, 212, 214, 210,
            219, 211, 221, 216, 220, 223, 213, 218, 208, 215, 209, 217, 30, 20, 22, 18, 27, 19, 29,
            24, 28, 31, 21, 26, 16, 23, 17, 25, 62, 52, 54, 50, 59, 51, 61, 56, 60, 63, 53, 58, 48,
            55, 49, 57, 158, 148, 150, 146, 155, 147, 157, 152, 156, 159, 149, 154, 144, 151, 145,
            153, 14, 4, 6, 2, 11, 3, 13, 8, 12, 15, 5, 10, 0, 7, 1, 9, 46, 36, 38, 34, 43, 35, 45,
            40, 44, 47, 37, 42, 32, 39, 33, 41, 190, 180, 182, 178, 187, 179, 189, 184, 188, 191,
            181, 186, 176, 183, 177, 185, 78, 68, 70, 66, 75, 67, 77, 72, 76, 79, 69, 74, 64, 71,
            65, 73, 254, 244, 246, 242, 251, 243, 253, 248, 252, 255, 245, 250, 240, 247, 241, 249,
            142, 132, 134, 130, 139, 131, 141, 136, 140, 143, 133, 138, 128, 135, 129, 137, 94, 84,
            86, 82, 91, 83, 93, 88, 92, 95, 85, 90, 80, 87, 81, 89, 110, 100, 102, 98, 107, 99,
            109, 104, 108, 111, 101, 106, 96, 103, 97, 105,
        ],
        [
            59, 53, 49, 57, 56, 61, 63, 48, 62, 52, 50, 51, 60, 55, 58, 54, 171, 165, 161, 169,
            168, 173, 175, 160, 174, 164, 162, 163, 172, 167, 170, 166, 219, 213, 209, 217, 216,
            221, 223, 208, 222, 212, 210, 211, 220, 215, 218, 214, 203, 197, 193, 201, 200, 205,
            207, 192, 206, 196, 194, 195, 204, 199, 202, 198, 27, 21, 17, 25, 24, 29, 31, 16, 30,
            20, 18, 19, 28, 23, 26, 22, 43, 37, 33, 41, 40, 45, 47, 32, 46, 36, 34, 35, 44, 39, 42,
            38, 11, 5, 1, 9, 8, 13, 15, 0, 14, 4, 2, 3, 12, 7, 10, 6, 187, 181, 177, 185, 184, 189,
            191, 176, 190, 180, 178, 179, 188, 183, 186, 182, 123, 117, 113, 121, 120, 125, 127,
            112, 126, 116, 114, 115, 124, 119, 122, 118, 91, 85, 81, 89, 88, 93, 95, 80, 94, 84,
            82, 83, 92, 87, 90, 86, 155, 149, 145, 153, 152, 157, 159, 144, 158, 148, 146, 147,
            156, 151, 154, 150, 75, 69, 65, 73, 72, 77, 79, 64, 78, 68, 66, 67, 76, 71, 74, 70,
            139, 133, 129, 137, 136, 141, 143, 128, 142, 132, 130, 131, 140, 135, 138, 134, 251,
            245, 241, 249, 248, 253, 255, 240, 254, 244, 242, 243, 252, 247, 250, 246, 235, 229,
            225, 233, 232, 237, 239, 224, 238, 228, 226, 227, 236, 231, 234, 230, 107, 101, 97,
            105, 104, 109, 111, 96, 110, 100, 98, 99, 108, 103, 106, 102,
        ],
        [
            177, 189, 178, 185, 183, 186, 182, 176, 184, 188, 180, 181, 191, 179, 187, 190, 161,
            173, 162, 169, 167, 170, 166, 160, 168, 172, 164, 165, 175, 163, 171, 174, 241, 253,
            242, 249, 247, 250, 246, 240, 248, 252, 244, 245, 255, 243, 251, 254, 81, 93, 82, 89,
            87, 90, 86, 80, 88, 92, 84, 85, 95, 83, 91, 94, 1, 13, 2, 9, 7, 10, 6, 0, 8, 12, 4, 5,
            15, 3, 11, 14, 193, 205, 194, 201, 199, 202, 198, 192, 200, 204, 196, 197, 207, 195,
            203, 206, 225, 237, 226, 233, 231, 234, 230, 224, 232, 236, 228, 229, 239, 227, 235,
            238, 129, 141, 130, 137, 135, 138, 134, 128, 136, 140, 132, 133, 143, 131, 139, 142,
            97, 109, 98, 105, 103, 106, 102, 96, 104, 108, 100, 101, 111, 99, 107, 110, 33, 45, 34,
            41, 39, 42, 38, 32, 40, 44, 36, 37, 47, 35, 43, 46, 49, 61, 50, 57, 55, 58, 54, 48, 56,
            60, 52, 53, 63, 51, 59, 62, 145, 157, 146, 153, 151, 154, 150, 144, 152, 156, 148, 149,
            159, 147, 155, 158, 17, 29, 18, 25, 23, 26, 22, 16, 24, 28, 20, 21, 31, 19, 27, 30,
            113, 125, 114, 121, 119, 122, 118, 112, 120, 124, 116, 117, 127, 115, 123, 126, 209,
            221, 210, 217, 215, 218, 214, 208, 216, 220, 212, 213, 223, 211, 219, 222, 65, 77, 66,
            73, 71, 74, 70, 64, 72, 76, 68, 69, 79, 67, 75, 78,
        ],
    ];
    // The raw 4-bit S-box: eight rows of sixteen nibble substitutions.
    const SBOX: SmallSbox = [
        [9, 6, 3, 2, 8, 11, 1, 7, 10, 4, 14, 15, 12, 0, 13, 5],
        [3, 7, 14, 9, 8, 10, 15, 0, 5, 2, 6, 12, 11, 4, 13, 1],
        [14, 4, 6, 2, 11, 3, 13, 8, 12, 15, 5, 10, 0, 7, 1, 9],
        [14, 7, 10, 12, 13, 1, 3, 9, 0, 2, 11, 4, 15, 8, 5, 6],
        [11, 5, 1, 9, 8, 13, 15, 0, 14, 4, 2, 3, 12, 7, 10, 6],
        [3, 10, 13, 12, 1, 2, 0, 11, 7, 5, 9, 4, 8, 15, 14, 6],
        [1, 13, 2, 9, 7, 10, 6, 0, 8, 12, 4, 5, 15, 3, 11, 14],
        [11, 10, 15, 5, 0, 12, 14, 8, 6, 2, 3, 9, 1, 7, 13, 4],
    ];
}
#[derive(Clone)]
pub enum CryptoProB {}
impl Sbox for CryptoProB {
const NAME: &'static str = "CryptoProB";
const EXP_SBOX: ExpSbox = [
[
8, 4, 11, 1, 3, 5, 0, 9, 2, 14, 10, 12, 13, 6, 7, 15, 24, 20, 27, 17, 19, 21, 16, 25,
18, 30, 26, 28, 29, 22, 23, 31, 40, 36, 43, 33, 35, 37, 32, 41, 34, 46, 42, 44, 45, 38,
39, 47, 168, 164, 171, 161, 163, 165, 160, 169, 162, 174, 170, 172, 173, 166, 167, 175,
72, 68, 75, 65, 67, 69, 64, 73, 66, 78, 74, 76, 77, 70, 71, 79, 216, 212, 219, 209,
211, 213, 208, 217, 210, 222, 218, 220, 221, 214, 215, 223, 88, 84, 91, 81, 83, 85, 80,
89, 82, 94, 90, 92, 93, 86, 87, 95, 200, 196, 203, 193, 195, 197, 192, 201, 194, 206,
202, 204, 205, 198, 199, 207, 152, 148, 155, 145, 147, 149, 144, 153, 146, 158, 154,
156, 157, 150, 151, 159, 120, 116, 123, 113, 115, 117, 112, 121, 114, 126, 122, 124,
125, 118, 119, 127, 56, 52, 59, 49, 51, 53, 48, 57, 50, 62, 58, 60, 61, 54, 55, 63,
248, 244, 251, 241, 243, 245, 240, 249, 242, 254, 250, 252, 253, 246, 247, 255, 184,
180, 187, 177, 179, 181, 176, 185, 178, 190, 186, 188, 189, 182, 183, 191, 136, 132,
139, 129, 131, 133, 128, 137, 130, 142, 138, 140, 141, 134, 135, 143, 104, 100, 107,
97, 99, 101, 96, 105, 98, 110, 106, 108, 109, 102, 103, 111, 232, 228, 235, 225, 227,
229, 224, 233, 226, 238, 234, 236, 237, 230, 231, 239,
],
[
126, 124, 112, 122, 121, 114, 125, 123, 119, 117, 120, 127, 115, 118, 113, 116, 94, 92,
80, 90, 89, 82, 93, 91, 87, 85, 88, 95, 83, 86, 81, 84, 14, 12, 0, 10, 9, 2, 13, 11, 7,
5, 8, 15, 3, 6, 1, 4, 222, 220, 208, 218, 217, 210, 221, 219, 215, 213, 216, 223, 211,
214, 209, 212, 190, 188, 176, 186, 185, 178, 189, 187, 183, 181, 184, 191, 179, 182,
177, 180, 110, 108, 96, 106, 105, 98, 109, 107, 103, 101, 104, 111, 99, 102, 97, 100,
30, 28, 16, 26, 25, 18, 29, 27, 23, 21, 24, 31, 19, 22, 17, 20, 46, 44, 32, 42, 41, 34,
45, 43, 39, 37, 40, 47, 35, 38, 33, 36, 62, 60, 48, 58, 57, 50, 61, 59, 55, 53, 56, 63,
51, 54, 49, 52, 174, 172, 160, 170, 169, 162, 173, 171, 167, 165, 168, 175, 163, 166,
161, 164, 206, 204, 192, 202, 201, 194, 205, 203, 199, 197, 200, 207, 195, 198, 193,
196, 254, 252, 240, 250, 249, 242, 253, 251, 247, 245, 248, 255, 243, 246, 241, 244,
78, 76, 64, 74, 73, 66, 77, 75, 71, 69, 72, 79, 67, 70, 65, 68, 238, 236, 224, 234,
233, 226, 237, 235, 231, 229, 232, 239, 227, 230, 225, 228, 158, 156, 144, 154, 153,
146, 157, 155, 151, 149, 152, 159, 147, 150, 145, 148, 142, 140, 128, 138, 137, 130,
141, 139, 135, 133, 136, 143, 131, 134, 129, 132,
],
[
130, 135, 140, 143, 137, 133, 138, 139, 129, 132, 128, 141, 134, 136, 142, 131, 50, 55,
60, 63, 57, 53, 58, 59, 49, 52, 48, 61, 54, 56, 62, 51, 34, 39, 44, 47, 41, 37, 42, 43,
33, 36, 32, 45, 38, 40, 46, 35, 98, 103, 108, 111, 105, 101, 106, 107, 97, 100, 96,
109, 102, 104, 110, 99, 66, 71, 76, 79, 73, 69, 74, 75, 65, 68, 64, 77, 70, 72, 78, 67,
210, 215, 220, 223, 217, 213, 218, 219, 209, 212, 208, 221, 214, 216, 222, 211, 226,
231, 236, 239, 233, 229, 234, 235, 225, 228, 224, 237, 230, 232, 238, 227, 178, 183,
188, 191, 185, 181, 186, 187, 177, 180, 176, 189, 182, 184, 190, 179, 194, 199, 204,
207, 201, 197, 202, 203, 193, 196, 192, 205, 198, 200, 206, 195, 18, 23, 28, 31, 25,
21, 26, 27, 17, 20, 16, 29, 22, 24, 30, 19, 114, 119, 124, 127, 121, 117, 122, 123,
113, 116, 112, 125, 118, 120, 126, 115, 242, 247, 252, 255, 249, 245, 250, 251, 241,
244, 240, 253, 246, 248, 254, 243, 162, 167, 172, 175, 169, 165, 170, 171, 161, 164,
160, 173, 166, 168, 174, 163, 2, 7, 12, 15, 9, 5, 10, 11, 1, 4, 0, 13, 6, 8, 14, 3,
146, 151, 156, 159, 153, 149, 154, 155, 145, 148, 144, 157, 150, 152, 158, 147, 82, 87,
92, 95, 89, 85, 90, 91, 81, 84, 80, 93, 86, 88, 94, 83,
],
[
5, 2, 10, 11, 9, 1, 12, 3, 7, 4, 13, 0, 6, 15, 8, 14, 69, 66, 74, 75, 73, 65, 76, 67,
71, 68, 77, 64, 70, 79, 72, 78, 181, 178, 186, 187, 185, 177, 188, 179, 183, 180, 189,
176, 182, 191, 184, 190, 229, 226, 234, 235, 233, 225, 236, 227, 231, 228, 237, 224,
230, 239, 232, 238, 133, 130, 138, 139, 137, 129, 140, 131, 135, 132, 141, 128, 134,
143, 136, 142, 53, 50, 58, 59, 57, 49, 60, 51, 55, 52, 61, 48, 54, 63, 56, 62, 117,
114, 122, 123, 121, 113, 124, 115, 119, 116, 125, 112, 118, 127, 120, 126, 21, 18, 26,
27, 25, 17, 28, 19, 23, 20, 29, 16, 22, 31, 24, 30, 165, 162, 170, 171, 169, 161, 172,
163, 167, 164, 173, 160, 166, 175, 168, 174, 37, 34, 42, 43, 41, 33, 44, 35, 39, 36,
45, 32, 38, 47, 40, 46, 149, 146, 154, 155, 153, 145, 156, 147, 151, 148, 157, 144,
150, 159, 152, 158, 101, 98, 106, 107, 105, 97, 108, 99, 103, 100, 109, 96, 102, 111,
104, 110, 245, 242, 250, 251, 249, 241, 252, 243, 247, 244, 253, 240, 246, 255, 248,
254, 213, 210, 218, 219, 217, 209, 220, 211, 215, 212, 221, 208, 214, 223, 216, 222,
85, 82, 90, 91, 89, 81, 92, 83, 87, 84, 93, 80, 86, 95, 88, 94, 197, 194, 202, 203,
201, 193, 204, 195, 199, 196, 205, 192, 198, 207, 200, 206,
],
];
const SBOX: SmallSbox = [
[8, 4, 11, 1, 3, 5, 0, 9, 2, 14, 10, 12, 13, 6, 7, 15],
[0, 1, 2, 10, 4, 13, 5, 12, 9, 7, 3, 15, 11, 8, 6, 14],
[14, 12, 0, 10, 9, 2, 13, 11, 7, 5, 8, 15, 3, 6, 1, 4],
[7, 5, 0, 13, 11, 6, 1, 2, 3, 10, 12, 15, 4, 14, 9, 8],
[2, 7, 12, 15, 9, 5, 10, 11, 1, 4, 0, 13, 6, 8, 14, 3],
[8, 3, 2, 6, 4, 13, 14, 11, 12, 1, 7, 15, 10, 0, 9, 5],
[5, 2, 10, 11, 9, 1, 12, 3, 7, 4, 13, 0, 6, 15, 8, 14],
[0, 4, 11, 14, 8, 3, 7, 1, 10, 2, 9, 6, 15, 13, 5, 12],
];
}
#[derive(Clone)]
pub enum CryptoProC {}
impl Sbox for CryptoProC {
const NAME: &'static str = "CryptoProC";
const EXP_SBOX: ExpSbox = [
[
1, 11, 12, 2, 9, 13, 0, 15, 4, 5, 8, 14, 10, 7, 6, 3, 17, 27, 28, 18, 25, 29, 16, 31,
20, 21, 24, 30, 26, 23, 22, 19, 113, 123, 124, 114, 121, 125, 112, 127, 116, 117, 120,
126, 122, 119, 118, 115, 209, 219, 220, 210, 217, 221, 208, 223, 212, 213, 216, 222,
218, 215, 214, 211, 177, 187, 188, 178, 185, 189, 176, 191, 180, 181, 184, 190, 186,
183, 182, 179, 65, 75, 76, 66, 73, 77, 64, 79, 68, 69, 72, 78, 74, 71, 70, 67, 81, 91,
92, 82, 89, 93, 80, 95, 84, 85, 88, 94, 90, 87, 86, 83, 33, 43, 44, 34, 41, 45, 32, 47,
36, 37, 40, 46, 42, 39, 38, 35, 129, 139, 140, 130, 137, 141, 128, 143, 132, 133, 136,
142, 138, 135, 134, 131, 225, 235, 236, 226, 233, 237, 224, 239, 228, 229, 232, 238,
234, 231, 230, 227, 241, 251, 252, 242, 249, 253, 240, 255, 244, 245, 248, 254, 250,
247, 246, 243, 193, 203, 204, 194, 201, 205, 192, 207, 196, 197, 200, 206, 202, 199,
198, 195, 145, 155, 156, 146, 153, 157, 144, 159, 148, 149, 152, 158, 154, 151, 150,
147, 161, 171, 172, 162, 169, 173, 160, 175, 164, 165, 168, 174, 170, 167, 166, 163,
97, 107, 108, 98, 105, 109, 96, 111, 100, 101, 104, 110, 106, 103, 102, 99, 49, 59, 60,
50, 57, 61, 48, 63, 52, 53, 56, 62, 58, 55, 54, 51,
],
[
56, 50, 53, 48, 52, 57, 63, 58, 51, 55, 60, 61, 54, 62, 49, 59, 104, 98, 101, 96, 100,
105, 111, 106, 99, 103, 108, 109, 102, 110, 97, 107, 8, 2, 5, 0, 4, 9, 15, 10, 3, 7,
12, 13, 6, 14, 1, 11, 24, 18, 21, 16, 20, 25, 31, 26, 19, 23, 28, 29, 22, 30, 17, 27,
88, 82, 85, 80, 84, 89, 95, 90, 83, 87, 92, 93, 86, 94, 81, 91, 216, 210, 213, 208,
212, 217, 223, 218, 211, 215, 220, 221, 214, 222, 209, 219, 168, 162, 165, 160, 164,
169, 175, 170, 163, 167, 172, 173, 166, 174, 161, 171, 136, 130, 133, 128, 132, 137,
143, 138, 131, 135, 140, 141, 134, 142, 129, 139, 184, 178, 181, 176, 180, 185, 191,
186, 179, 183, 188, 189, 182, 190, 177, 187, 40, 34, 37, 32, 36, 41, 47, 42, 35, 39,
44, 45, 38, 46, 33, 43, 152, 146, 149, 144, 148, 153, 159, 154, 147, 151, 156, 157,
150, 158, 145, 155, 120, 114, 117, 112, 116, 121, 127, 122, 115, 119, 124, 125, 118,
126, 113, 123, 232, 226, 229, 224, 228, 233, 239, 234, 227, 231, 236, 237, 230, 238,
225, 235, 248, 242, 245, 240, 244, 249, 255, 250, 243, 247, 252, 253, 246, 254, 241,
251, 200, 194, 197, 192, 196, 201, 207, 202, 195, 199, 204, 205, 198, 206, 193, 203,
72, 66, 69, 64, 68, 73, 79, 74, 67, 71, 76, 77, 70, 78, 65, 75,
],
[
200, 205, 203, 192, 196, 197, 193, 194, 201, 195, 204, 206, 198, 207, 202, 199, 152,
157, 155, 144, 148, 149, 145, 146, 153, 147, 156, 158, 150, 159, 154, 151, 184, 189,
187, 176, 180, 181, 177, 178, 185, 179, 188, 190, 182, 191, 186, 183, 24, 29, 27, 16,
20, 21, 17, 18, 25, 19, 28, 30, 22, 31, 26, 23, 136, 141, 139, 128, 132, 133, 129, 130,
137, 131, 140, 142, 134, 143, 138, 135, 232, 237, 235, 224, 228, 229, 225, 226, 233,
227, 236, 238, 230, 239, 234, 231, 40, 45, 43, 32, 36, 37, 33, 34, 41, 35, 44, 46, 38,
47, 42, 39, 72, 77, 75, 64, 68, 69, 65, 66, 73, 67, 76, 78, 70, 79, 74, 71, 120, 125,
123, 112, 116, 117, 113, 114, 121, 115, 124, 126, 118, 127, 122, 119, 56, 61, 59, 48,
52, 53, 49, 50, 57, 51, 60, 62, 54, 63, 58, 55, 104, 109, 107, 96, 100, 101, 97, 98,
105, 99, 108, 110, 102, 111, 106, 103, 88, 93, 91, 80, 84, 85, 81, 82, 89, 83, 92, 94,
86, 95, 90, 87, 168, 173, 171, 160, 164, 165, 161, 162, 169, 163, 172, 174, 166, 175,
170, 167, 8, 13, 11, 0, 4, 5, 1, 2, 9, 3, 12, 14, 6, 15, 10, 7, 248, 253, 251, 240,
244, 245, 241, 242, 249, 243, 252, 254, 246, 255, 250, 247, 216, 221, 219, 208, 212,
213, 209, 210, 217, 211, 220, 222, 214, 223, 218, 215,
],
[
122, 121, 118, 120, 125, 126, 114, 112, 127, 115, 117, 123, 116, 113, 124, 119, 74, 73,
70, 72, 77, 78, 66, 64, 79, 67, 69, 75, 68, 65, 76, 71, 10, 9, 6, 8, 13, 14, 2, 0, 15,
3, 5, 11, 4, 1, 12, 7, 90, 89, 86, 88, 93, 94, 82, 80, 95, 83, 85, 91, 84, 81, 92, 87,
170, 169, 166, 168, 173, 174, 162, 160, 175, 163, 165, 171, 164, 161, 172, 167, 42, 41,
38, 40, 45, 46, 34, 32, 47, 35, 37, 43, 36, 33, 44, 39, 250, 249, 246, 248, 253, 254,
242, 240, 255, 243, 245, 251, 244, 241, 252, 247, 234, 233, 230, 232, 237, 238, 226,
224, 239, 227, 229, 235, 228, 225, 236, 231, 202, 201, 198, 200, 205, 206, 194, 192,
207, 195, 197, 203, 196, 193, 204, 199, 106, 105, 102, 104, 109, 110, 98, 96, 111, 99,
101, 107, 100, 97, 108, 103, 26, 25, 22, 24, 29, 30, 18, 16, 31, 19, 21, 27, 20, 17,
28, 23, 186, 185, 182, 184, 189, 190, 178, 176, 191, 179, 181, 187, 180, 177, 188, 183,
218, 217, 214, 216, 221, 222, 210, 208, 223, 211, 213, 219, 212, 209, 220, 215, 154,
153, 150, 152, 157, 158, 146, 144, 159, 147, 149, 155, 148, 145, 156, 151, 58, 57, 54,
56, 61, 62, 50, 48, 63, 51, 53, 59, 52, 49, 60, 55, 138, 137, 134, 136, 141, 142, 130,
128, 143, 131, 133, 139, 132, 129, 140, 135,
],
];
const SBOX: SmallSbox = [
[1, 11, 12, 2, 9, 13, 0, 15, 4, 5, 8, 14, 10, 7, 6, 3],
[0, 1, 7, 13, 11, 4, 5, 2, 8, 14, 15, 12, 9, 10, 6, 3],
[8, 2, 5, 0, 4, 9, 15, 10, 3, 7, 12, 13, 6, 14, 1, 11],
[3, 6, 0, 1, 5, 13, 10, 8, 11, 2, 9, 7, 14, 15, 12, 4],
[8, 13, 11, 0, 4, 5, 1, 2, 9, 3, 12, 14, 6, 15, 10, 7],
[12, 9, 11, 1, 8, 14, 2, 4, 7, 3, 6, 5, 10, 0, 15, 13],
[10, 9, 6, 8, 13, 14, 2, 0, 15, 3, 5, 11, 4, 1, 12, 7],
[7, 4, 0, 5, 10, 2, 15, 14, 12, 6, 1, 11, 13, 9, 3, 8],
];
}
#[derive(Clone)]
pub enum CryptoProD {}
impl Sbox for CryptoProD {
const NAME: &'static str = "CryptoProD";
const EXP_SBOX: ExpSbox = [
[
90, 84, 85, 86, 88, 81, 83, 87, 93, 92, 94, 80, 89, 82, 91, 95, 250, 244, 245, 246,
248, 241, 243, 247, 253, 252, 254, 240, 249, 242, 251, 255, 74, 68, 69, 70, 72, 65, 67,
71, 77, 76, 78, 64, 73, 66, 75, 79, 10, 4, 5, 6, 8, 1, 3, 7, 13, 12, 14, 0, 9, 2, 11,
15, 42, 36, 37, 38, 40, 33, 35, 39, 45, 44, 46, 32, 41, 34, 43, 47, 218, 212, 213, 214,
216, 209, 211, 215, 221, 220, 222, 208, 217, 210, 219, 223, 186, 180, 181, 182, 184,
177, 179, 183, 189, 188, 190, 176, 185, 178, 187, 191, 154, 148, 149, 150, 152, 145,
147, 151, 157, 156, 158, 144, 153, 146, 155, 159, 26, 20, 21, 22, 24, 17, 19, 23, 29,
28, 30, 16, 25, 18, 27, 31, 122, 116, 117, 118, 120, 113, 115, 119, 125, 124, 126, 112,
121, 114, 123, 127, 106, 100, 101, 102, 104, 97, 99, 103, 109, 108, 110, 96, 105, 98,
107, 111, 58, 52, 53, 54, 56, 49, 51, 55, 61, 60, 62, 48, 57, 50, 59, 63, 202, 196,
197, 198, 200, 193, 195, 199, 205, 204, 206, 192, 201, 194, 203, 207, 234, 228, 229,
230, 232, 225, 227, 231, 237, 236, 238, 224, 233, 226, 235, 239, 170, 164, 165, 166,
168, 161, 163, 167, 173, 172, 174, 160, 169, 162, 171, 175, 138, 132, 133, 134, 136,
129, 131, 135, 141, 140, 142, 128, 137, 130, 139, 143,
],
[
71, 79, 76, 78, 73, 68, 65, 64, 67, 75, 69, 66, 70, 74, 72, 77, 167, 175, 172, 174,
169, 164, 161, 160, 163, 171, 165, 162, 166, 170, 168, 173, 119, 127, 124, 126, 121,
116, 113, 112, 115, 123, 117, 114, 118, 122, 120, 125, 199, 207, 204, 206, 201, 196,
193, 192, 195, 203, 197, 194, 198, 202, 200, 205, 7, 15, 12, 14, 9, 4, 1, 0, 3, 11, 5,
2, 6, 10, 8, 13, 247, 255, 252, 254, 249, 244, 241, 240, 243, 251, 245, 242, 246, 250,
248, 253, 39, 47, 44, 46, 41, 36, 33, 32, 35, 43, 37, 34, 38, 42, 40, 45, 135, 143,
140, 142, 137, 132, 129, 128, 131, 139, 133, 130, 134, 138, 136, 141, 231, 239, 236,
238, 233, 228, 225, 224, 227, 235, 229, 226, 230, 234, 232, 237, 23, 31, 28, 30, 25,
20, 17, 16, 19, 27, 21, 18, 22, 26, 24, 29, 103, 111, 108, 110, 105, 100, 97, 96, 99,
107, 101, 98, 102, 106, 104, 109, 87, 95, 92, 94, 89, 84, 81, 80, 83, 91, 85, 82, 86,
90, 88, 93, 215, 223, 220, 222, 217, 212, 209, 208, 211, 219, 213, 210, 214, 218, 216,
221, 183, 191, 188, 190, 185, 180, 177, 176, 179, 187, 181, 178, 182, 186, 184, 189,
151, 159, 156, 158, 153, 148, 145, 144, 147, 155, 149, 146, 150, 154, 152, 157, 55, 63,
60, 62, 57, 52, 49, 48, 51, 59, 53, 50, 54, 58, 56, 61,
],
[
119, 118, 116, 123, 121, 124, 114, 122, 113, 120, 112, 126, 127, 125, 115, 117, 103,
102, 100, 107, 105, 108, 98, 106, 97, 104, 96, 110, 111, 109, 99, 101, 39, 38, 36, 43,
41, 44, 34, 42, 33, 40, 32, 46, 47, 45, 35, 37, 71, 70, 68, 75, 73, 76, 66, 74, 65, 72,
64, 78, 79, 77, 67, 69, 215, 214, 212, 219, 217, 220, 210, 218, 209, 216, 208, 222,
223, 221, 211, 213, 151, 150, 148, 155, 153, 156, 146, 154, 145, 152, 144, 158, 159,
157, 147, 149, 247, 246, 244, 251, 249, 252, 242, 250, 241, 248, 240, 254, 255, 253,
243, 245, 7, 6, 4, 11, 9, 12, 2, 10, 1, 8, 0, 14, 15, 13, 3, 5, 167, 166, 164, 171,
169, 172, 162, 170, 161, 168, 160, 174, 175, 173, 163, 165, 23, 22, 20, 27, 25, 28, 18,
26, 17, 24, 16, 30, 31, 29, 19, 21, 87, 86, 84, 91, 89, 92, 82, 90, 81, 88, 80, 94, 95,
93, 83, 85, 183, 182, 180, 187, 185, 188, 178, 186, 177, 184, 176, 190, 191, 189, 179,
181, 135, 134, 132, 139, 137, 140, 130, 138, 129, 136, 128, 142, 143, 141, 131, 133,
231, 230, 228, 235, 233, 236, 226, 234, 225, 232, 224, 238, 239, 237, 227, 229, 199,
198, 196, 203, 201, 204, 194, 202, 193, 200, 192, 206, 207, 205, 195, 197, 55, 54, 52,
59, 57, 60, 50, 58, 49, 56, 48, 62, 63, 61, 51, 53,
],
[
29, 30, 20, 17, 23, 16, 21, 26, 19, 28, 24, 31, 22, 18, 25, 27, 61, 62, 52, 49, 55, 48,
53, 58, 51, 60, 56, 63, 54, 50, 57, 59, 173, 174, 164, 161, 167, 160, 165, 170, 163,
172, 168, 175, 166, 162, 169, 171, 157, 158, 148, 145, 151, 144, 149, 154, 147, 156,
152, 159, 150, 146, 153, 155, 93, 94, 84, 81, 87, 80, 85, 90, 83, 92, 88, 95, 86, 82,
89, 91, 189, 190, 180, 177, 183, 176, 181, 186, 179, 188, 184, 191, 182, 178, 185, 187,
77, 78, 68, 65, 71, 64, 69, 74, 67, 76, 72, 79, 70, 66, 73, 75, 253, 254, 244, 241,
247, 240, 245, 250, 243, 252, 248, 255, 246, 242, 249, 251, 141, 142, 132, 129, 135,
128, 133, 138, 131, 140, 136, 143, 134, 130, 137, 139, 109, 110, 100, 97, 103, 96, 101,
106, 99, 108, 104, 111, 102, 98, 105, 107, 125, 126, 116, 113, 119, 112, 117, 122, 115,
124, 120, 127, 118, 114, 121, 123, 237, 238, 228, 225, 231, 224, 229, 234, 227, 236,
232, 239, 230, 226, 233, 235, 221, 222, 212, 209, 215, 208, 213, 218, 211, 220, 216,
223, 214, 210, 217, 219, 13, 14, 4, 1, 7, 0, 5, 10, 3, 12, 8, 15, 6, 2, 9, 11, 45, 46,
36, 33, 39, 32, 37, 42, 35, 44, 40, 47, 38, 34, 41, 43, 205, 206, 196, 193, 199, 192,
197, 202, 195, 204, 200, 207, 198, 194, 201, 203,
],
];
const SBOX: SmallSbox = [
[10, 4, 5, 6, 8, 1, 3, 7, 13, 12, 14, 0, 9, 2, 11, 15],
[5, 15, 4, 0, 2, 13, 11, 9, 1, 7, 6, 3, 12, 14, 10, 8],
[7, 15, 12, 14, 9, 4, 1, 0, 3, 11, 5, 2, 6, 10, 8, 13],
[4, 10, 7, 12, 0, 15, 2, 8, 14, 1, 6, 5, 13, 11, 9, 3],
[7, 6, 4, 11, 9, 12, 2, 10, 1, 8, 0, 14, 15, 13, 3, 5],
[7, 6, 2, 4, 13, 9, 15, 0, 10, 1, 5, 11, 8, 14, 12, 3],
[13, 14, 4, 1, 7, 0, 5, 10, 3, 12, 8, 15, 6, 2, 9, 11],
[1, 3, 10, 9, 5, 11, 4, 15, 8, 6, 7, 14, 13, 0, 2, 12],
];
}
#[cfg(test)]
mod tests {
use super::*;
fn test_sbox<S: Sbox>() {
let gen_sbox = S::gen_exp_sbox();
for i in 0..4 {
for j in 0..256 {
assert_eq!(gen_sbox[i][j], S::EXP_SBOX[i][j]);
}
}
}
#[test]
fn test_sboxes() {
test_sbox::<Tc26>();
test_sbox::<TestSbox>();
test_sbox::<CryptoProA>();
test_sbox::<CryptoProB>();
test_sbox::<CryptoProC>();
test_sbox::<CryptoProD>();
}
}
| 66.916811 | 99 | 0.475227 |
4be8fdbc3dd75aa8bf3b2c632f787628d129daef | 8,666 | use std::{sync::Arc, time::Instant};
use crate::data_source::MappingABI;
use crate::{
capabilities::NodeCapabilities, network::EthereumNetworkAdapters, Chain, DataSource,
EthereumAdapter, EthereumAdapterTrait, EthereumContractCall, EthereumContractCallError,
};
use anyhow::{Context, Error};
use blockchain::HostFn;
use graph::runtime::gas::Gas;
use graph::runtime::{AscIndexId, IndexForAscTypeId};
use graph::{
blockchain::{self, BlockPtr, HostFnCtx},
cheap_clone::CheapClone,
prelude::{
ethabi::{self, Address, Token},
EthereumCallCache, Future01CompatExt,
},
runtime::{asc_get, asc_new, AscPtr, HostExportError},
semver::Version,
slog::{info, trace, Logger},
};
use graph_runtime_wasm::asc_abi::class::{AscEnumArray, EthereumValueKind};
use super::abi::{AscUnresolvedContractCall, AscUnresolvedContractCall_0_0_4};
// Allow up to 1,000 ethereum calls. The justification is that we don't know how much Ethereum gas a
// call takes, but we limit the maximum to 25 million. One unit of Ethereum gas is at least 100ns
// according to these benchmarks [1], so 1000 of our gas. Assuming the worst case, an Ethereum call
// should therefore consume 25 billion gas. This allows for 400 calls per handler with the current
// limits.
//
// [1] - https://www.sciencedirect.com/science/article/abs/pii/S0166531620300900
pub const ETHEREUM_CALL: Gas = Gas::new(25_000_000_000);
pub struct RuntimeAdapter {
pub(crate) eth_adapters: Arc<EthereumNetworkAdapters>,
pub(crate) call_cache: Arc<dyn EthereumCallCache>,
}
impl blockchain::RuntimeAdapter<Chain> for RuntimeAdapter {
fn host_fns(&self, ds: &DataSource) -> Result<Vec<HostFn>, Error> {
let abis = ds.mapping.abis.clone();
let call_cache = self.call_cache.cheap_clone();
let eth_adapter = self
.eth_adapters
.cheapest_with(&NodeCapabilities {
archive: ds.mapping.requires_archive()?,
traces: false,
})?
.cheap_clone();
let ethereum_call = HostFn {
name: "ethereum.call",
func: Arc::new(move |ctx, wasm_ptr| {
ethereum_call(ð_adapter, call_cache.cheap_clone(), ctx, wasm_ptr, &abis)
.map(|ptr| ptr.wasm_ptr())
}),
};
Ok(vec![ethereum_call])
}
}
/// function ethereum.call(call: SmartContractCall): Array<Token> | null
fn ethereum_call(
eth_adapter: &EthereumAdapter,
call_cache: Arc<dyn EthereumCallCache>,
ctx: HostFnCtx<'_>,
wasm_ptr: u32,
abis: &[Arc<MappingABI>],
) -> Result<AscEnumArray<EthereumValueKind>, HostExportError> {
ctx.gas.consume_host_fn(ETHEREUM_CALL)?;
// For apiVersion >= 0.0.4 the call passed from the mapping includes the
// function signature; subgraphs using an apiVersion < 0.0.4 don't pass
// the signature along with the call.
let call: UnresolvedContractCall = if ctx.heap.api_version() >= Version::new(0, 0, 4) {
asc_get::<_, AscUnresolvedContractCall_0_0_4, _>(ctx.heap, wasm_ptr.into())?
} else {
asc_get::<_, AscUnresolvedContractCall, _>(ctx.heap, wasm_ptr.into())?
};
let result = eth_call(
eth_adapter,
call_cache,
&ctx.logger,
&ctx.block_ptr,
call,
abis,
)?;
match result {
Some(tokens) => Ok(asc_new(ctx.heap, tokens.as_slice())?),
None => Ok(AscPtr::null()),
}
}
/// Returns `Ok(None)` if the call was reverted.
fn eth_call(
eth_adapter: &EthereumAdapter,
call_cache: Arc<dyn EthereumCallCache>,
logger: &Logger,
block_ptr: &BlockPtr,
unresolved_call: UnresolvedContractCall,
abis: &[Arc<MappingABI>],
) -> Result<Option<Vec<Token>>, HostExportError> {
let start_time = Instant::now();
// Obtain the path to the contract ABI
let contract = abis
.iter()
.find(|abi| abi.name == unresolved_call.contract_name)
.with_context(|| {
format!(
"Could not find ABI for contract \"{}\", try adding it to the 'abis' section \
of the subgraph manifest",
unresolved_call.contract_name
)
})?
.contract
.clone();
let function = match unresolved_call.function_signature {
// Behavior for apiVersion < 0.0.4: look up function by name; for overloaded
// functions this always picks the same overloaded variant, which is incorrect
// and may lead to encoding/decoding errors
None => contract
.function(unresolved_call.function_name.as_str())
.with_context(|| {
format!(
"Unknown function \"{}::{}\" called from WASM runtime",
unresolved_call.contract_name, unresolved_call.function_name
)
})?,
// Behavior for apiVersion >= 0.0.04: look up function by signature of
// the form `functionName(uint256,string) returns (bytes32,string)`; this
// correctly picks the correct variant of an overloaded function
Some(ref function_signature) => contract
.functions_by_name(unresolved_call.function_name.as_str())
.with_context(|| {
format!(
"Unknown function \"{}::{}\" called from WASM runtime",
unresolved_call.contract_name, unresolved_call.function_name
)
})?
.iter()
.find(|f| function_signature == &f.signature())
.with_context(|| {
format!(
"Unknown function \"{}::{}\" with signature `{}` \
called from WASM runtime",
unresolved_call.contract_name,
unresolved_call.function_name,
function_signature,
)
})?,
};
let call = EthereumContractCall {
address: unresolved_call.contract_address,
block_ptr: block_ptr.cheap_clone(),
function: function.clone(),
args: unresolved_call.function_args.clone(),
};
// Run Ethereum call in tokio runtime
let logger1 = logger.clone();
let call_cache = call_cache.clone();
let result = match graph::block_on(
eth_adapter.contract_call(&logger1, call, call_cache).compat()
) {
Ok(tokens) => Ok(Some(tokens)),
Err(EthereumContractCallError::Revert(reason)) => {
info!(logger, "Contract call reverted"; "reason" => reason);
Ok(None)
}
// Any error reported by the Ethereum node could be due to the block no longer being on
// the main chain. This is very unespecific but we don't want to risk failing a
// subgraph due to a transient error such as a reorg.
Err(EthereumContractCallError::Web3Error(e)) => Err(HostExportError::PossibleReorg(anyhow::anyhow!(
"Ethereum node returned an error when calling function \"{}\" of contract \"{}\": {}",
unresolved_call.function_name,
unresolved_call.contract_name,
e
))),
// Also retry on timeouts.
Err(EthereumContractCallError::Timeout) => Err(HostExportError::PossibleReorg(anyhow::anyhow!(
"Ethereum node did not respond when calling function \"{}\" of contract \"{}\"",
unresolved_call.function_name,
unresolved_call.contract_name,
))),
Err(e) => Err(HostExportError::Unknown(anyhow::anyhow!(
"Failed to call function \"{}\" of contract \"{}\": {}",
unresolved_call.function_name,
unresolved_call.contract_name,
e
))),
};
trace!(logger, "Contract call finished";
"address" => &unresolved_call.contract_address.to_string(),
"contract" => &unresolved_call.contract_name,
"function" => &unresolved_call.function_name,
"function_signature" => &unresolved_call.function_signature,
"time" => format!("{}ms", start_time.elapsed().as_millis()));
result
}
#[derive(Clone, Debug)]
pub struct UnresolvedContractCall {
pub contract_name: String,
pub contract_address: Address,
pub function_name: String,
pub function_signature: Option<String>,
pub function_args: Vec<ethabi::Token>,
}
impl AscIndexId for AscUnresolvedContractCall {
const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::SmartContractCall;
}
| 38.345133 | 111 | 0.614124 |
03f9dd58fdccd38706d30981a328185533215690 | 2,607 | // Copyright 2019 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any
// person obtaining a copy of this software and associated
// documentation files (the "Software"), to deal in the
// Software without restriction, including without
// limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice
// shall be included in all copies or substantial portions
// of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
// ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
// TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
// PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
// SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use crate::raw::server::RawServerRequest;
use crate::transport::TransportServer;
use core::{hash::Hash, marker::PhantomData};
/// Allows responding to a server request in a more elegant and strongly-typed fashion.
pub struct TypedResponder<'a, R, I, T> {
/// The request to answer.
rq: RawServerRequest<'a, R, I>,
/// Marker that pins the type of the response.
response_ty: PhantomData<T>,
}
impl<'a, R, I, T> From<RawServerRequest<'a, R, I>> for TypedResponder<'a, R, I, T> {
fn from(rq: RawServerRequest<'a, R, I>) -> TypedResponder<'a, R, I, T> {
TypedResponder {
rq,
response_ty: PhantomData,
}
}
}
impl<'a, R, I, T> TypedResponder<'a, R, I, T>
where
R: TransportServer<RequestId = I>,
I: Clone + PartialEq + Eq + Hash + Send + Sync,
T: serde::Serialize,
{
/// Returns a successful response.
pub fn ok(self, response: impl Into<T>) {
self.respond(Ok(response))
}
/// Returns an erroneous response.
pub fn err(self, err: crate::common::Error) {
self.respond(Err::<T, _>(err))
}
/// Returns a response.
pub fn respond(self, response: Result<impl Into<T>, crate::common::Error>) {
let response = match response {
Ok(v) => crate::common::to_value(v.into())
.map_err(|_| crate::common::Error::internal_error()),
Err(err) => Err(err),
};
self.rq.respond(response)
}
}
| 34.302632 | 87 | 0.663982 |
5be426eb38278be39a7d32b42efd6a83a02cd975 | 311 | // FIXME: missing sysroot spans (#53081)
// ignore-i586-unknown-linux-gnu
// ignore-i586-unknown-linux-musl
// ignore-i686-unknown-linux-musl
struct R<'a> {
r: &'a R<'a>,
}
fn foo(res: Result<u32, &R>) -> u32 {
let Ok(x) = res;
//~^ ERROR refutable pattern
x
}
fn main() {
foo(Ok(23));
}
| 16.368421 | 40 | 0.588424 |
abc349da88a2a43d20ca39fd46d162b0b5cd6b54 | 820 | //! Hash serialization with validation
use crate::{hash::Algorithm, Hash};
use serde::{Deserialize, Deserializer, Serializer};
use subtle_encoding::hex;
/// Deserialize hexstring into Hash
pub fn deserialize<'de, D>(deserializer: D) -> Result<Hash, D::Error>
where
D: Deserializer<'de>,
{
let hexstring: String = Option::<String>::deserialize(deserializer)?.unwrap_or_default();
Hash::from_hex_upper(Algorithm::Sha256, hexstring.as_str()).map_err(serde::de::Error::custom)
}
/// Serialize from Hash into hexstring
pub fn serialize<S>(value: &Hash, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let hex_bytes = hex::encode_upper(value.as_bytes());
let hex_string = String::from_utf8(hex_bytes).map_err(serde::ser::Error::custom)?;
serializer.serialize_str(&hex_string)
}
| 32.8 | 97 | 0.718293 |
fe22c9065e8b5524be05253b215b7542a3b9f6d4 | 1,087 | use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;
#[derive(Deserialize)]
struct AppendArgs {
row: Tagged<Value>,
}
pub struct Append;
impl WholeStreamCommand for Append {
fn name(&self) -> &str {
"append"
}
fn signature(&self) -> Signature {
Signature::build("append").required(
"row value",
SyntaxShape::Any,
"the value of the row to append to the table",
)
}
fn usage(&self) -> &str {
"Append the given row to the table"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, append)?.run()
}
}
fn append(
AppendArgs { row }: AppendArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let mut after: VecDeque<Tagged<Value>> = VecDeque::new();
after.push_back(row);
Ok(OutputStream::from_input(input.values.chain(after)))
}
| 22.645833 | 61 | 0.609936 |
2178061926b76e29b3ade6348233eaea7db6ac76 | 1,915 | use std::{fs, path::Path};
fn read_input<T>(path: T) -> Result<Vec<Vec<u8>>, String>
where
T: AsRef<Path>,
{
let content = fs::read_to_string(path.as_ref()).map_err(|_| "Could not read file")?;
Ok(content.lines().map(parse_number).collect())
}
fn parse_number(line: &str) -> Vec<u8> {
line.bytes().map(|x| x - 48).collect()
}
enum FilterCriteria {
MostCommon,
LeastCommon,
}
fn filter_by_criteria(
lines: Vec<Vec<u8>>,
position: usize,
filter_criteria: FilterCriteria,
) -> Vec<Vec<u8>> {
if lines.len() == 1 {
return lines;
}
let len: usize = lines.len();
let sum: usize = lines.iter().map(|x| x[position] as usize).sum();
let filter_bit: u8 = match filter_criteria {
FilterCriteria::MostCommon => {
if 2 * sum >= len {
1
} else {
0
}
}
FilterCriteria::LeastCommon => {
if 2 * sum >= len {
0
} else {
1
}
}
};
let new = lines
.into_iter()
.filter(|x| x[position] == filter_bit)
.collect::<Vec<_>>();
filter_by_criteria(new, position + 1, filter_criteria)
}
fn find_gamma(lines: &Vec<Vec<u8>>) -> isize {
bin_vec_to_number(&filter_by_criteria(lines.clone(), 0, FilterCriteria::MostCommon)[0])
}
fn find_epsilon(lines: &Vec<Vec<u8>>) -> isize {
bin_vec_to_number(&filter_by_criteria(lines.clone(), 0, FilterCriteria::LeastCommon)[0])
}
fn bin_vec_to_number(bitvector: &[u8]) -> isize {
bitvector.iter().fold(0, |acc, bit| acc * 2 + *bit as isize)
}
fn main() -> Result<(), &'static str> {
let input =
read_input("inputs/03b-binary-diagnostic.input").map_err(|_| "Could not read file")?;
let gamma = find_gamma(&input);
let epsilon = find_epsilon(&input);
println!("{}", gamma * epsilon);
Ok(())
}
| 25.197368 | 93 | 0.562924 |
fbb2279552acf97eff797b8f97bd2cefd1db94e3 | 27,734 | #[doc = "Reader of register SEC_CTRL_ROM_MEM_RULE3"]
pub type R = crate::R<u32, super::SEC_CTRL_ROM_MEM_RULE3>;
#[doc = "Writer for register SEC_CTRL_ROM_MEM_RULE3"]
pub type W = crate::W<u32, super::SEC_CTRL_ROM_MEM_RULE3>;
#[doc = "Register SEC_CTRL_ROM_MEM_RULE3 `reset()`'s with value 0"]
impl crate::ResetValue for super::SEC_CTRL_ROM_MEM_RULE3 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Possible values of the field `RULE0`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE0_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE0_A> for u8 {
#[inline(always)]
fn from(variant: RULE0_A) -> Self {
match variant {
RULE0_A::ENUM_NS_NP => 0,
RULE0_A::ENUM_NS_P => 1,
RULE0_A::ENUM_S_NP => 2,
RULE0_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE0`"]
pub type RULE0_R = crate::R<u8, RULE0_A>;
impl RULE0_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE0_A {
match self.bits {
0 => RULE0_A::ENUM_NS_NP,
1 => RULE0_A::ENUM_NS_P,
2 => RULE0_A::ENUM_S_NP,
3 => RULE0_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE0_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE0_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE0_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE0_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE0`"]
pub struct RULE0_W<'a> {
w: &'a mut W,
}
impl<'a> RULE0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE0_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE0_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE0_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE0_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE0_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03);
self.w
}
}
#[doc = "Possible values of the field `RULE1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE1_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE1_A> for u8 {
#[inline(always)]
fn from(variant: RULE1_A) -> Self {
match variant {
RULE1_A::ENUM_NS_NP => 0,
RULE1_A::ENUM_NS_P => 1,
RULE1_A::ENUM_S_NP => 2,
RULE1_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE1`"]
pub type RULE1_R = crate::R<u8, RULE1_A>;
impl RULE1_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE1_A {
match self.bits {
0 => RULE1_A::ENUM_NS_NP,
1 => RULE1_A::ENUM_NS_P,
2 => RULE1_A::ENUM_S_NP,
3 => RULE1_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE1_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE1_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE1_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE1_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE1`"]
pub struct RULE1_W<'a> {
w: &'a mut W,
}
impl<'a> RULE1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE1_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE1_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE1_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE1_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE1_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 4)) | (((value as u32) & 0x03) << 4);
self.w
}
}
#[doc = "Possible values of the field `RULE2`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE2_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE2_A> for u8 {
#[inline(always)]
fn from(variant: RULE2_A) -> Self {
match variant {
RULE2_A::ENUM_NS_NP => 0,
RULE2_A::ENUM_NS_P => 1,
RULE2_A::ENUM_S_NP => 2,
RULE2_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE2`"]
pub type RULE2_R = crate::R<u8, RULE2_A>;
impl RULE2_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE2_A {
match self.bits {
0 => RULE2_A::ENUM_NS_NP,
1 => RULE2_A::ENUM_NS_P,
2 => RULE2_A::ENUM_S_NP,
3 => RULE2_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE2_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE2_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE2_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE2_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE2`"]
pub struct RULE2_W<'a> {
w: &'a mut W,
}
impl<'a> RULE2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE2_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE2_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE2_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE2_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE2_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
self.w
}
}
#[doc = "Possible values of the field `RULE3`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE3_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE3_A> for u8 {
#[inline(always)]
fn from(variant: RULE3_A) -> Self {
match variant {
RULE3_A::ENUM_NS_NP => 0,
RULE3_A::ENUM_NS_P => 1,
RULE3_A::ENUM_S_NP => 2,
RULE3_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE3`"]
pub type RULE3_R = crate::R<u8, RULE3_A>;
impl RULE3_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE3_A {
match self.bits {
0 => RULE3_A::ENUM_NS_NP,
1 => RULE3_A::ENUM_NS_P,
2 => RULE3_A::ENUM_S_NP,
3 => RULE3_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE3_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE3_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE3_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE3_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE3`"]
pub struct RULE3_W<'a> {
w: &'a mut W,
}
impl<'a> RULE3_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE3_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE3_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE3_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE3_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE3_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 12)) | (((value as u32) & 0x03) << 12);
self.w
}
}
#[doc = "Possible values of the field `RULE4`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE4_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE4_A> for u8 {
#[inline(always)]
fn from(variant: RULE4_A) -> Self {
match variant {
RULE4_A::ENUM_NS_NP => 0,
RULE4_A::ENUM_NS_P => 1,
RULE4_A::ENUM_S_NP => 2,
RULE4_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE4`"]
pub type RULE4_R = crate::R<u8, RULE4_A>;
impl RULE4_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE4_A {
match self.bits {
0 => RULE4_A::ENUM_NS_NP,
1 => RULE4_A::ENUM_NS_P,
2 => RULE4_A::ENUM_S_NP,
3 => RULE4_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE4_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE4_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE4_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE4_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE4`"]
pub struct RULE4_W<'a> {
w: &'a mut W,
}
impl<'a> RULE4_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE4_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE4_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE4_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE4_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE4_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 16)) | (((value as u32) & 0x03) << 16);
self.w
}
}
#[doc = "Possible values of the field `RULE5`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE5_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE5_A> for u8 {
#[inline(always)]
fn from(variant: RULE5_A) -> Self {
match variant {
RULE5_A::ENUM_NS_NP => 0,
RULE5_A::ENUM_NS_P => 1,
RULE5_A::ENUM_S_NP => 2,
RULE5_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE5`"]
pub type RULE5_R = crate::R<u8, RULE5_A>;
impl RULE5_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE5_A {
match self.bits {
0 => RULE5_A::ENUM_NS_NP,
1 => RULE5_A::ENUM_NS_P,
2 => RULE5_A::ENUM_S_NP,
3 => RULE5_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE5_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE5_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE5_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE5_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE5`"]
pub struct RULE5_W<'a> {
w: &'a mut W,
}
impl<'a> RULE5_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE5_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE5_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE5_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE5_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE5_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 20)) | (((value as u32) & 0x03) << 20);
self.w
}
}
#[doc = "Possible values of the field `RULE6`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE6_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE6_A> for u8 {
#[inline(always)]
fn from(variant: RULE6_A) -> Self {
match variant {
RULE6_A::ENUM_NS_NP => 0,
RULE6_A::ENUM_NS_P => 1,
RULE6_A::ENUM_S_NP => 2,
RULE6_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE6`"]
pub type RULE6_R = crate::R<u8, RULE6_A>;
impl RULE6_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE6_A {
match self.bits {
0 => RULE6_A::ENUM_NS_NP,
1 => RULE6_A::ENUM_NS_P,
2 => RULE6_A::ENUM_S_NP,
3 => RULE6_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE6_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE6_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE6_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE6_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE6`"]
pub struct RULE6_W<'a> {
w: &'a mut W,
}
impl<'a> RULE6_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE6_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE6_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE6_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE6_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE6_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 24)) | (((value as u32) & 0x03) << 24);
self.w
}
}
#[doc = "Possible values of the field `RULE7`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RULE7_A {
#[doc = "Non-secure and Non-priviledge user access allowed."]
ENUM_NS_NP,
#[doc = "Non-secure and Privilege access allowed."]
ENUM_NS_P,
#[doc = "Secure and Non-priviledge user access allowed."]
ENUM_S_NP,
#[doc = "Secure and Priviledge user access allowed."]
ENUM_S_P,
}
impl From<RULE7_A> for u8 {
#[inline(always)]
fn from(variant: RULE7_A) -> Self {
match variant {
RULE7_A::ENUM_NS_NP => 0,
RULE7_A::ENUM_NS_P => 1,
RULE7_A::ENUM_S_NP => 2,
RULE7_A::ENUM_S_P => 3,
}
}
}
#[doc = "Reader of field `RULE7`"]
pub type RULE7_R = crate::R<u8, RULE7_A>;
impl RULE7_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RULE7_A {
match self.bits {
0 => RULE7_A::ENUM_NS_NP,
1 => RULE7_A::ENUM_NS_P,
2 => RULE7_A::ENUM_S_NP,
3 => RULE7_A::ENUM_S_P,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `ENUM_NS_NP`"]
#[inline(always)]
pub fn is_enum_ns_np(&self) -> bool {
*self == RULE7_A::ENUM_NS_NP
}
#[doc = "Checks if the value of the field is `ENUM_NS_P`"]
#[inline(always)]
pub fn is_enum_ns_p(&self) -> bool {
*self == RULE7_A::ENUM_NS_P
}
#[doc = "Checks if the value of the field is `ENUM_S_NP`"]
#[inline(always)]
pub fn is_enum_s_np(&self) -> bool {
*self == RULE7_A::ENUM_S_NP
}
#[doc = "Checks if the value of the field is `ENUM_S_P`"]
#[inline(always)]
pub fn is_enum_s_p(&self) -> bool {
*self == RULE7_A::ENUM_S_P
}
}
#[doc = "Write proxy for field `RULE7`"]
pub struct RULE7_W<'a> {
w: &'a mut W,
}
impl<'a> RULE7_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RULE7_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Non-secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_ns_np(self) -> &'a mut W {
self.variant(RULE7_A::ENUM_NS_NP)
}
#[doc = "Non-secure and Privilege access allowed."]
#[inline(always)]
pub fn enum_ns_p(self) -> &'a mut W {
self.variant(RULE7_A::ENUM_NS_P)
}
#[doc = "Secure and Non-priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_np(self) -> &'a mut W {
self.variant(RULE7_A::ENUM_S_NP)
}
#[doc = "Secure and Priviledge user access allowed."]
#[inline(always)]
pub fn enum_s_p(self) -> &'a mut W {
self.variant(RULE7_A::ENUM_S_P)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 28)) | (((value as u32) & 0x03) << 28);
self.w
}
}
impl R {
    /// Extracts the 2-bit rule value whose low bit sits at `shift`.
    fn rule_bits(&self, shift: u32) -> u8 {
        ((self.bits >> shift) & 0x03) as u8
    }
    #[doc = "Bits 0:1 - secure control rule0. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule0(&self) -> RULE0_R {
        RULE0_R::new(self.rule_bits(0))
    }
    #[doc = "Bits 4:5 - secure control rule1. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule1(&self) -> RULE1_R {
        RULE1_R::new(self.rule_bits(4))
    }
    #[doc = "Bits 8:9 - secure control rule2. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule2(&self) -> RULE2_R {
        RULE2_R::new(self.rule_bits(8))
    }
    #[doc = "Bits 12:13 - secure control rule3. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule3(&self) -> RULE3_R {
        RULE3_R::new(self.rule_bits(12))
    }
    #[doc = "Bits 16:17 - secure control rule4. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule4(&self) -> RULE4_R {
        RULE4_R::new(self.rule_bits(16))
    }
    #[doc = "Bits 20:21 - secure control rule5. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule5(&self) -> RULE5_R {
        RULE5_R::new(self.rule_bits(20))
    }
    #[doc = "Bits 24:25 - secure control rule6. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule6(&self) -> RULE6_R {
        RULE6_R::new(self.rule_bits(24))
    }
    #[doc = "Bits 28:29 - secure control rule7. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule7(&self) -> RULE7_R {
        RULE7_R::new(self.rule_bits(28))
    }
}
impl W {
    // Each accessor hands out a write proxy (`RULEn_W`) that borrows this
    // writer; the proxy's `bits`/`variant` methods mask the 2-bit value into
    // the field's position in `self.bits`.
    #[doc = "Bits 0:1 - secure control rule0. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule0(&mut self) -> RULE0_W {
        RULE0_W { w: self }
    }
    #[doc = "Bits 4:5 - secure control rule1. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule1(&mut self) -> RULE1_W {
        RULE1_W { w: self }
    }
    #[doc = "Bits 8:9 - secure control rule2. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule2(&mut self) -> RULE2_W {
        RULE2_W { w: self }
    }
    #[doc = "Bits 12:13 - secure control rule3. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule3(&mut self) -> RULE3_W {
        RULE3_W { w: self }
    }
    #[doc = "Bits 16:17 - secure control rule4. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule4(&mut self) -> RULE4_W {
        RULE4_W { w: self }
    }
    #[doc = "Bits 20:21 - secure control rule5. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule5(&mut self) -> RULE5_W {
        RULE5_W { w: self }
    }
    #[doc = "Bits 24:25 - secure control rule6. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule6(&mut self) -> RULE6_W {
        RULE6_W { w: self }
    }
    #[doc = "Bits 28:29 - secure control rule7. it can be set when check_reg's write_lock is '0'"]
    #[inline(always)]
    pub fn rule7(&mut self) -> RULE7_W {
        RULE7_W { w: self }
    }
}
| 31.768614 | 98 | 0.5684 |
6a3ee313aaf998bbf5af6ea0250b3955083e5187 | 3,537 | use std::io::BufRead;
use super::{
IsoBoxInfo,
BoxParsingError,
BoxReader,
BoxValue,
Flags,
IsoBoxEntry,
IsoBoxParser,
};
// Parsed contents of a Track Fragment Header (`tfhd`) box.
// The four `default_*`/offset fields are `None` when the corresponding flag
// bit was not set in the box (see `parse` for the flag-to-field mapping).
pub struct Tfhd {
    version: u8,
    flags: Flags,
    // Mandatory: identifies the track this fragment belongs to.
    track_id: u32,
    base_data_offset: Option<u64>,          // present iff flag 0x000001
    sample_description_index: Option<u32>,  // present iff flag 0x000002
    default_sample_duration: Option<u32>,   // present iff flag 0x000008
    default_sample_size: Option<u32>,       // present iff flag 0x000010
    default_sample_flags: Option<u32>,      // present iff flag 0x000020
}
impl IsoBoxParser for Tfhd {
    /// Reads a `tfhd` payload: version, flags, the mandatory `track_id`, then
    /// a sequence of fields whose presence is signalled by individual flag
    /// bits.
    fn parse<T: BufRead>(
        reader: &mut BoxReader<T>,
        _content_size: Option<u64>,
        _box_info: &std::rc::Rc<IsoBoxInfo>
    ) -> Result<Self, BoxParsingError> {
        let version = reader.read_u8()?;
        let flags = Flags::read(reader)?;

        // Presence bits for the optional fields that follow `track_id`.
        let has_base_data_offset = flags.has_flag(0x000001);
        let has_sample_description_index = flags.has_flag(0x000002);
        let has_default_sample_duration = flags.has_flag(0x000008);
        let has_default_sample_size = flags.has_flag(0x000010);
        let has_default_sample_flags = flags.has_flag(0x000020);
        // TODO indicate flags values in get_inner_values_ref
        // let flag_duration_is_empty = flags.has_flag(0x010000);
        // let flag_default_base_is_moof = flags.has_flag(0x020000);

        let track_id = reader.read_u32()?;
        // The stream is sequential: each flag only tells us whether its field
        // exists, so the reads below must stay in this exact order.
        let base_data_offset = match has_base_data_offset {
            true => Some(reader.read_u64()?),
            false => None,
        };
        let sample_description_index = match has_sample_description_index {
            true => Some(reader.read_u32()?),
            false => None,
        };
        let default_sample_duration = match has_default_sample_duration {
            true => Some(reader.read_u32()?),
            false => None,
        };
        let default_sample_size = match has_default_sample_size {
            true => Some(reader.read_u32()?),
            false => None,
        };
        let default_sample_flags = match has_default_sample_flags {
            true => Some(reader.read_u32()?),
            false => None,
        };

        Ok(Self {
            version,
            flags,
            track_id,
            base_data_offset,
            sample_description_index,
            default_sample_duration,
            default_sample_size,
            default_sample_flags,
        })
    }
    /// Returns `(name, value)` pairs for every field that was present.
    fn get_inner_values_ref(&self) -> Vec<(&'static str, BoxValue)> {
        let mut values = vec![
            ("version", BoxValue::from(self.version)),
            ("flags", BoxValue::from(self.flags)),
            ("track_id", BoxValue::from(self.track_id)),
        ];
        // `Option` implements `IntoIterator`, so each `extend` appends at
        // most one entry — only when the field was parsed.
        values.extend(self.base_data_offset.map(|v| ("base_data_offset", BoxValue::from(v))));
        values.extend(
            self.sample_description_index
                .map(|v| ("sample_description_index", BoxValue::from(v))),
        );
        values.extend(
            self.default_sample_duration
                .map(|v| ("default_sample_duration", BoxValue::from(v))),
        );
        values.extend(
            self.default_sample_size
                .map(|v| ("default_sample_size", BoxValue::from(v))),
        );
        values.extend(
            self.default_sample_flags
                .map(|v| ("default_sample_flags", BoxValue::from(v))),
        );
        values
    }
    fn get_short_name() -> &'static str {
        "tfhd"
    }
    fn get_long_name() -> &'static str {
        "Track Fragment Header Box"
    }
    // A `tfhd` box is a leaf: it never contains child boxes.
    fn get_inner_boxes(self) -> Option<Vec<super::IsoBoxData>> {
        None
    }
    fn get_inner_boxes_ref(&self) -> Option<Vec<(&IsoBoxInfo, Option<&dyn IsoBoxEntry>)>> {
        None
    }
}
| 32.154545 | 91 | 0.60475 |
e87f4f40c3d3ae8753b2ec70d43092d6173e5a70 | 227 | use askama::Template;
// Inline template exercising askama's `loop.cycle("r", "g", "b")` helper:
// each item of `values` is prefixed with the next colour letter in rotation.
#[derive(Template)]
#[template(
    source = r#"{% for v in values %}{{ loop.cycle("r", "g", "b") }}{{ v }},{% endfor %}"#,
    ext = "txt"
)]
struct ForCycle<'a> {
    values: &'a [u8],
}
fn main() {
    // Intentionally empty — presumably a compile-pass test: the `ForCycle`
    // derive above is what this file exercises.
}
| 16.214286 | 91 | 0.497797 |
01b4486bc782c2b2d12a19d15b2612a692e6974e | 13,284 | // Copyright 2018 Mesh TensorFlow authors, T5 Authors and HuggingFace Inc. team.
// Copyright 2020 Guillaume Becquin
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::common::dropout::Dropout;
use crate::t5::layer_norm::T5LayerNorm;
use crate::t5::T5Config;
use std::borrow::Borrow;
use tch::nn::LinearConfig;
use tch::{nn, Device, Kind, Tensor};
#[derive(Debug)]
/// # Cache for T5 attention layers
/// Stores the cached value of key, value and key to avoid recalculation (e.g. at each generation step)
pub struct LayerState {
    /// Cached keys, shaped (batch, heads, cached_seq, d_kv) as produced by
    /// `T5Attention::shape`.
    pub prev_key: Tensor,
    /// Cached values, same layout as `prev_key`.
    pub prev_value: Tensor,
}
impl Clone for LayerState {
fn clone(&self) -> Self {
LayerState {
prev_key: self.prev_key.copy(),
prev_value: self.prev_value.copy(),
}
}
}
impl LayerState {
    /// Reorders both cached tensors along dimension 0 (the batch dimension)
    /// with `index_select`; presumably called when generation hypotheses are
    /// reordered (e.g. beam search) — confirm against callers.
    pub(crate) fn reorder_cache(&mut self, new_indices: &Tensor) {
        self.prev_key = self.prev_key.index_select(0, new_indices);
        self.prev_value = self.prev_value.index_select(0, new_indices);
    }
}
#[derive(Debug)]
// Multi-head attention block for T5 (self- or cross-attention).
pub struct T5Attention {
    // Decoder layers may cache key/value tensors (see `forward_t`).
    is_decoder: bool,
    // Whether relative positions may be positive (encoder) when bucketing.
    is_bidirectional: bool,
    // Only one layer per stack owns the learned relative-bias table.
    has_relative_attention_bias: bool,
    relative_attention_num_buckets: i64,
    d_model: i64,
    // Per-head key/value dimension.
    d_kv: i64,
    n_heads: i64,
    dropout: Dropout,
    // n_heads * d_kv: width of the q/k/v projections.
    inner_dim: i64,
    output_attentions: bool,
    store_cache: bool,
    // Bias-free linear projections (see `new`).
    query: nn::Linear,
    key: nn::Linear,
    value: nn::Linear,
    output: nn::Linear,
    // Present only when `has_relative_attention_bias` is true.
    relative_attention_bias: Option<nn::Embedding>,
}
impl T5Attention {
pub fn new<'p, P>(
p: P,
config: &T5Config,
is_decoder: bool,
is_bidirectional: bool,
store_cache: bool,
output_attentions: bool,
has_relative_attention_bias: bool,
) -> T5Attention
where
P: Borrow<nn::Path<'p>>,
{
let p = p.borrow();
let linear_config = LinearConfig {
bias: false,
..Default::default()
};
let inner_dim = config.num_heads * config.d_kv;
let key = nn::linear(p / "k", config.d_model, inner_dim, linear_config);
let value = nn::linear(p / "v", config.d_model, inner_dim, linear_config);
let query = nn::linear(p / "q", config.d_model, inner_dim, linear_config);
let output = nn::linear(p / "o", inner_dim, config.d_model, linear_config);
let dropout = Dropout::new(config.dropout_rate);
let relative_attention_bias = if has_relative_attention_bias {
Some(nn::embedding(
p / "relative_attention_bias",
config.relative_attention_num_buckets,
config.num_heads,
Default::default(),
))
} else {
None
};
T5Attention {
is_decoder,
is_bidirectional,
has_relative_attention_bias,
relative_attention_num_buckets: config.relative_attention_num_buckets,
d_model: config.d_model,
d_kv: config.d_kv,
n_heads: config.num_heads,
dropout,
inner_dim,
output_attentions,
store_cache,
query,
key,
value,
output,
relative_attention_bias,
}
}
fn unshape(&self, x: Tensor, bs: i64) -> Tensor {
x.transpose(1, 2)
.contiguous()
.view((bs, -1, self.inner_dim))
}
fn shape(&self, x: Tensor, bs: i64) -> Tensor {
x.view((bs, -1, self.n_heads, self.d_kv)).transpose(1, 2)
}
pub fn forward_t(
&self,
hidden_states: &Tensor,
key_value_states: Option<&Tensor>,
position_bias: Option<&Tensor>,
attention_mask: Option<&Tensor>,
mut layer_state: Option<LayerState>,
query_length: Option<i64>,
train: bool,
) -> (Tensor, Option<Tensor>, Option<Tensor>, Option<LayerState>) {
let input_size = hidden_states.size();
let (bs, seq_length, _) = (input_size[0], input_size[1], input_size[2]);
let real_seq_length = if layer_state.is_some() {
match query_length {
Some(value) => value,
None => seq_length + layer_state.as_ref().unwrap().prev_key.size()[2],
}
} else {
seq_length
};
let key_length = match key_value_states {
Some(value) => value.size()[1],
None => real_seq_length,
};
let q: Tensor = self.shape(hidden_states.as_ref().apply(&self.query), bs);
let (mut k, mut v) = if key_value_states.is_none() {
(
self.shape(hidden_states.apply(&self.key), bs),
self.shape(hidden_states.apply(&self.value), bs),
)
} else {
(
self.shape(key_value_states.as_ref().unwrap().apply(&self.key), bs),
self.shape(key_value_states.as_ref().unwrap().apply(&self.value), bs),
)
};
if layer_state.is_some() {
let layer_state = layer_state.as_ref().unwrap();
if key_value_states.is_none() {
k = Tensor::cat(&[&layer_state.prev_key, &k], 2);
v = Tensor::cat(&[&layer_state.prev_value, &v], 2);
} else {
k = layer_state.prev_key.copy();
v = layer_state.prev_value.copy();
}
};
layer_state = if self.is_decoder & self.store_cache {
Some(LayerState {
prev_key: k.copy(),
prev_value: v.copy(),
})
} else {
None
};
let mut scores = Tensor::einsum("bnqd,bnkd->bnqk", &[q, k]);
let calculated_position_bias = if position_bias.is_none() {
let mut temp_value = if self.has_relative_attention_bias {
self.compute_bias(real_seq_length, key_length, hidden_states.device())
} else {
Tensor::zeros(
&[1, self.n_heads, real_seq_length, key_length],
(scores.kind(), scores.device()),
)
};
if layer_state.is_some() {
let length = temp_value.size()[2];
temp_value = temp_value.slice(2, length - seq_length, length, 1);
};
if let Some(attention_mask) = attention_mask {
temp_value = temp_value + attention_mask
};
Some(temp_value)
} else {
None
};
let position_bias = if let Some(position_bias) = position_bias {
position_bias
} else {
calculated_position_bias.as_ref().unwrap()
};
scores += position_bias;
let attention_weights = scores
.softmax(-1, Kind::Float)
.apply_t(&self.dropout, train);
let context = self
.unshape(attention_weights.matmul(&v), bs)
.apply(&self.output);
let attention_weights = if self.output_attentions {
Some(attention_weights)
} else {
None
};
let position_bias = if self.has_relative_attention_bias {
calculated_position_bias
} else {
None
};
(context, attention_weights, position_bias, layer_state)
}
    /// Maps signed relative positions to discrete bucket indices for the
    /// relative-attention bias embedding.
    ///
    /// Buckets are exact for small distances (`< num_buckets / 2` after any
    /// bidirectional split) and logarithmically spaced for larger ones, with
    /// everything beyond `max_distance` clamped into the last bucket.
    fn get_relative_position_bucket(
        &self,
        relative_position: &Tensor,
        bidirectional: bool,
        num_buckets: i64,
        max_distance: i64,
    ) -> Tensor {
        // Work on the negated position: positive `n` means the key precedes the query.
        let n = -relative_position;
        let mut num_buckets = num_buckets;
        let mut ret = n.zeros_like();
        let n = if bidirectional {
            // Split the bucket range in two: the upper half is reserved for
            // one direction (offset added where n < 0), then fold to |n|.
            num_buckets /= 2;
            ret += n.lt(0).to_kind(Kind::Int64) * num_buckets;
            n.abs()
        } else {
            // Unidirectional: clamp negative distances to zero.
            n.max_other(&n.zeros_like())
        };
        let max_exact = num_buckets / 2;
        let is_small = n.lt(max_exact);
        // Log-scaled bucket index for distances >= max_exact, mapped onto the
        // remaining (num_buckets - max_exact) buckets up to max_distance.
        let value_if_large: Tensor = ((n.to_kind(Kind::Float) / max_exact as f64).log2()
            / (max_distance as f64 / max_exact as f64).log2()
            * (num_buckets - max_exact) as f64)
            .to_kind(Kind::Int64)
            + max_exact;
        // Clamp overly large distances into the final bucket.
        let value_if_large = value_if_large.min_other(&value_if_large.full_like(num_buckets - 1));
        // where_self: take `n` (exact bucket) where small, log bucket otherwise.
        ret += n.where_self(&is_small, &value_if_large);
        ret
    }
    /// Computes the relative position bias added to the attention scores.
    ///
    /// Builds the (k_len x q_len) grid of relative positions, buckets it
    /// (max distance fixed at 128 here), looks the buckets up in the
    /// `relative_attention_bias` embedding, and reshapes the result to
    /// `[1, n_heads, q_len, k_len]` for broadcasting over the batch.
    ///
    /// Panics if `relative_attention_bias` is `None`, i.e. callers must only
    /// invoke this when `has_relative_attention_bias` is set.
    fn compute_bias(&self, q_len: i64, k_len: i64, device: Device) -> Tensor {
        let context_position = Tensor::arange(q_len, (Kind::Int64, device)).unsqueeze(1);
        let memory_position = Tensor::arange(k_len, (Kind::Int64, device)).unsqueeze(0);
        // relative_position[i][j] = memory_j - context_i
        let relative_position = memory_position - context_position;
        let rp_bucket = self.get_relative_position_bucket(
            &relative_position,
            self.is_bidirectional,
            self.relative_attention_num_buckets,
            128,
        );
        // Embedding lookup yields (q, k, heads); permute to (heads, q, k)
        // and add a leading batch dimension.
        rp_bucket
            .apply(self.relative_attention_bias.as_ref().unwrap())
            .permute(&[2, 0, 1])
            .unsqueeze(0)
    }
}
/// Pre-norm self-attention sub-layer of a T5 block:
/// layer-norm -> self-attention -> dropout -> residual add.
pub struct T5LayerSelfAttention {
    // The attention mechanism itself (queries/keys/values from the same input).
    self_attention: T5Attention,
    // Applied to the input *before* attention (T5 uses pre-layer-norm).
    layer_norm: T5LayerNorm,
    // Applied to the attention output before the residual addition.
    dropout: Dropout,
}
impl T5LayerSelfAttention {
    /// Builds the sub-layer under the variable-store path `p / "SelfAttention"`
    /// / `p / "layer_norm"`, mirroring the original T5 checkpoint layout.
    pub fn new<'p, P>(
        p: P,
        config: &T5Config,
        has_relative_attention_bias: bool,
        is_decoder: bool,
        store_cache: bool,
        output_attentions: bool,
    ) -> T5LayerSelfAttention
    where
        P: Borrow<nn::Path<'p>>,
    {
        let p = p.borrow();
        let self_attention = T5Attention::new(
            p / "SelfAttention",
            config,
            is_decoder,
            // NOTE(review): `!is_decoder` appears to be the bidirectional flag
            // (encoders attend both ways) — confirm against T5Attention::new.
            !is_decoder,
            store_cache,
            output_attentions,
            has_relative_attention_bias,
        );
        let layer_norm =
            T5LayerNorm::new(p / "layer_norm", config.d_model, config.layer_norm_epsilon);
        let dropout = Dropout::new(config.dropout_rate);
        T5LayerSelfAttention {
            self_attention,
            layer_norm,
            dropout,
        }
    }
    /// Forward pass: normalizes the input, runs self-attention (no external
    /// key/value source, hence the `None` kv / query_length arguments) and
    /// adds the dropped-out result back onto the raw input (residual).
    ///
    /// Returns `(output, attention_weights, position_bias, layer_state)` as
    /// produced by the inner attention, with `output` being the residual sum.
    pub fn forward_t(
        &self,
        hidden_states: &Tensor,
        position_bias: Option<&Tensor>,
        attention_mask: Option<&Tensor>,
        layer_state: Option<LayerState>,
        train: bool,
    ) -> (Tensor, Option<Tensor>, Option<Tensor>, Option<LayerState>) {
        let norm_x = hidden_states.apply(&self.layer_norm);
        let (y, attention_weights, position_bias, layer_state) = self.self_attention.forward_t(
            &norm_x,
            None,
            position_bias,
            attention_mask,
            layer_state,
            None,
            train,
        );
        // Residual connection around the attention output.
        let output = hidden_states + y.apply_t(&self.dropout, train);
        (output, attention_weights, position_bias, layer_state)
    }
}
/// Pre-norm encoder-decoder (cross) attention sub-layer of a T5 decoder
/// block: layer-norm -> cross-attention over encoder states -> dropout ->
/// residual add.
pub struct T5LayerCrossAttention {
    // Attention where keys/values come from the encoder output.
    encoder_decoder_attention: T5Attention,
    // Pre-attention layer normalization.
    layer_norm: T5LayerNorm,
    // Applied to the attention output before the residual addition.
    dropout: Dropout,
}
impl T5LayerCrossAttention {
    /// Builds the sub-layer under `p / "EncDecAttention"` / `p / "layer_norm"`,
    /// mirroring the original T5 checkpoint layout.
    pub fn new<'p, P>(
        p: P,
        config: &T5Config,
        has_relative_attention_bias: bool,
        is_decoder: bool,
        store_cache: bool,
        output_attentions: bool,
    ) -> T5LayerCrossAttention
    where
        P: Borrow<nn::Path<'p>>,
    {
        let p = p.borrow();
        let encoder_decoder_attention = T5Attention::new(
            p / "EncDecAttention",
            config,
            is_decoder,
            // NOTE(review): `true` here is presumably the bidirectional flag
            // (cross-attention sees the whole encoder sequence) — confirm
            // against T5Attention::new.
            true,
            store_cache,
            output_attentions,
            has_relative_attention_bias,
        );
        let layer_norm =
            T5LayerNorm::new(p / "layer_norm", config.d_model, config.layer_norm_epsilon);
        let dropout = Dropout::new(config.dropout_rate);
        T5LayerCrossAttention {
            encoder_decoder_attention,
            layer_norm,
            dropout,
        }
    }
    /// Forward pass: normalizes the decoder input, attends over `kv`
    /// (the encoder hidden states when provided), then applies dropout and
    /// a residual connection.
    ///
    /// Returns `(output, attention_weights, position_bias, layer_state)` as
    /// produced by the inner attention, with `output` being the residual sum.
    pub fn forward_t(
        &self,
        hidden_states: &Tensor,
        kv: Option<&Tensor>,
        position_bias: Option<&Tensor>,
        attention_mask: Option<&Tensor>,
        layer_state: Option<LayerState>,
        query_length: Option<i64>,
        train: bool,
    ) -> (Tensor, Option<Tensor>, Option<Tensor>, Option<LayerState>) {
        let norm_x = hidden_states.apply(&self.layer_norm);
        let (y, attention_weights, position_bias, layer_state) =
            self.encoder_decoder_attention.forward_t(
                &norm_x,
                kv,
                position_bias,
                attention_mask,
                layer_state,
                query_length,
                train,
            );
        // Residual connection around the attention output.
        let output = hidden_states + y.apply_t(&self.dropout, train);
        (output, attention_weights, position_bias, layer_state)
    }
}
| 30.678984 | 103 | 0.568127 |
f82dafa25172090462eb371f810c91ead8f92a6d | 2,902 | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(doc_cfg)]
#![feature(target_feature, cfg_target_feature)]
// @has doc_cfg/struct.Portable.html
// @!has - '//*[@id="main"]/*[@class="stability"]/*[@class="stab portability"]' ''
// @has - '//*[@id="method.unix_and_arm_only_function"]' 'fn unix_and_arm_only_function()'
// @has - '//*[@class="stab portability"]' 'This is supported on Unix and ARM only.'
pub struct Portable;
// @has doc_cfg/unix_only/index.html \
// '//*[@id="main"]/*[@class="stability"]/*[@class="stab portability"]' \
// 'This is supported on Unix only.'
// @matches - '//*[@class="module-item"]//*[@class="stab portability"]' '\AUnix\Z'
// @matches - '//*[@class="module-item"]//*[@class="stab portability"]' '\AUnix and ARM\Z'
// @count - '//*[@class="stab portability"]' 3
#[doc(cfg(unix))]
pub mod unix_only {
// @has doc_cfg/unix_only/fn.unix_only_function.html \
// '//*[@id="main"]/*[@class="stability"]/*[@class="stab portability"]' \
// 'This is supported on Unix only.'
// @count - '//*[@class="stab portability"]' 1
pub fn unix_only_function() {
content::should::be::irrelevant();
}
// @has doc_cfg/unix_only/trait.ArmOnly.html \
// '//*[@id="main"]/*[@class="stability"]/*[@class="stab portability"]' \
// 'This is supported on Unix and ARM only.'
// @count - '//*[@class="stab portability"]' 3
#[doc(cfg(target_arch = "arm"))]
pub trait ArmOnly {
fn unix_and_arm_only_function();
}
impl ArmOnly for super::Portable {
fn unix_and_arm_only_function() {}
}
}
// tagging a function with `#[target_feature]` creates a doc(cfg(target_feature)) node for that
// item as well
// the portability header is different on the module view versus the full view
// @has doc_cfg/index.html
// @matches - '//*[@class="module-item"]//*[@class="stab portability"]' '\Aavx\Z'
// @has doc_cfg/fn.uses_target_feature.html
// @has - '//*[@id="main"]/*[@class="stability"]/*[@class="stab portability"]' \
// 'This is supported with target feature avx only.'
#[target_feature(enable = "avx")]
pub unsafe fn uses_target_feature() {
content::should::be::irrelevant();
}
// @has doc_cfg/fn.uses_cfg_target_feature.html
// @has - '//*[@id="main"]/*[@class="stability"]/*[@class="stab portability"]' \
// 'This is supported with target feature avx only.'
#[doc(cfg(target_feature = "avx"))]
pub fn uses_cfg_target_feature() {
uses_target_feature();
}
| 40.305556 | 95 | 0.649207 |
1ca682a46aaafe2a22c4165a24fe194222ca65e6 | 4,183 | use crate::read2::{ProcOutput, FILTERED_PATHS_PLACEHOLDER_LEN, HEAD_LEN, TAIL_LEN};
/// Output shorter than the abbreviation threshold is passed through verbatim.
#[test]
fn test_abbreviate_short_string() {
    let input: &[u8] = b"Hello world!";
    let mut out = ProcOutput::new();
    out.extend(input, &[]);
    let bytes = out.into_bytes();
    assert_eq!(input, &*bytes);
}
/// Short output accumulated over several `extend` calls is still passed
/// through verbatim (no abbreviation between calls).
#[test]
fn test_abbreviate_short_string_multiple_steps() {
    let mut out = ProcOutput::new();
    for chunk in [&b"Hello "[..], &b"world!"[..]].iter() {
        out.extend(chunk, &[]);
    }
    assert_eq!(b"Hello world!", &*out.into_bytes());
}
/// A single extension past HEAD_LEN + TAIL_LEN triggers abbreviation:
/// the middle bytes are replaced by a "<<<<<< SKIPPED N BYTES >>>>>>" marker.
#[test]
fn test_abbreviate_long_string() {
    let mut out = ProcOutput::new();
    // 16 bytes more than fits: exactly 16 bytes must be skipped.
    let data = vec![b'.'; HEAD_LEN + TAIL_LEN + 16];
    out.extend(&data, &[]);
    let mut expected = vec![b'.'; HEAD_LEN];
    expected.extend_from_slice(b"\n\n<<<<<< SKIPPED 16 BYTES >>>>>>\n\n");
    expected.extend_from_slice(&vec![b'.'; TAIL_LEN]);
    // We first check the length to avoid endless terminal output if the length differs, since
    // `out` is hundreds of KBs in size.
    let out = out.into_bytes();
    assert_eq!(expected.len(), out.len());
    assert_eq!(expected, out);
}
/// Abbreviation also works when the overflow arrives incrementally:
/// later bytes rotate into the tail, pushing older tail bytes into the
/// skipped region.
#[test]
fn test_abbreviate_long_string_multiple_steps() {
    let mut out = ProcOutput::new();
    // Fill head and tail exactly, then overflow by 32 bytes.
    out.extend(&vec![b'.'; HEAD_LEN], &[]);
    out.extend(&vec![b'.'; TAIL_LEN], &[]);
    // Also test whether the rotation works
    out.extend(&vec![b'!'; 16], &[]);
    out.extend(&vec![b'?'; 16], &[]);
    let mut expected = vec![b'.'; HEAD_LEN];
    expected.extend_from_slice(b"\n\n<<<<<< SKIPPED 32 BYTES >>>>>>\n\n");
    // The newest 32 bytes ('!' then '?') must appear at the end of the tail.
    expected.extend_from_slice(&vec![b'.'; TAIL_LEN - 32]);
    expected.extend_from_slice(&vec![b'!'; 16]);
    expected.extend_from_slice(&vec![b'?'; 16]);
    // We first check the length to avoid endless terminal output if the length differs, since
    // `out` is hundreds of KBs in size.
    let out = out.into_bytes();
    assert_eq!(expected.len(), out.len());
    assert_eq!(expected, out);
}
/// Filter strings occurring in the output are detected, and `filtered_len`
/// accounts for each occurrence as if it were replaced by the placeholder —
/// even when a filter string is split across multiple `extend` calls.
///
/// Fix: test name typo corrected ("filterss" -> "filters"); behavior and
/// assertions are unchanged.
#[test]
fn test_abbreviate_filters_are_detected() {
    let mut out = ProcOutput::new();
    let filters = &["foo".to_string(), "quux".to_string()];
    out.extend(b"Hello foo", filters);
    // Check items from a previous extension are not double-counted.
    out.extend(b"! This is a qu", filters);
    // Check items are detected across extensions.
    out.extend(b"ux.", filters);
    match &out {
        // filtered_len = raw length, with each filter's length swapped for
        // the fixed placeholder length.
        ProcOutput::Full { bytes, filtered_len } => assert_eq!(
            *filtered_len,
            bytes.len() + FILTERED_PATHS_PLACEHOLDER_LEN * filters.len()
                - filters.iter().map(|i| i.len()).sum::<usize>()
        ),
        ProcOutput::Abbreviated { .. } => panic!("out should not be abbreviated"),
    }
    assert_eq!(b"Hello foo! This is a quux.", &*out.into_bytes());
}
/// Bytes matched by a filter count only as the (shorter) placeholder length,
/// so output that would otherwise overflow stays un-abbreviated.
#[test]
fn test_abbreviate_filters_avoid_abbreviations() {
    let mut out = ProcOutput::new();
    // One 64-byte filter; input is sized so that after substituting the
    // placeholder length it fits exactly within HEAD_LEN + TAIL_LEN.
    let filters = &[std::iter::repeat('a').take(64).collect::<String>()];
    let mut expected = vec![b'.'; HEAD_LEN - FILTERED_PATHS_PLACEHOLDER_LEN as usize];
    expected.extend_from_slice(filters[0].as_bytes());
    expected.extend_from_slice(&vec![b'.'; TAIL_LEN]);
    out.extend(&expected, filters);
    // We first check the length to avoid endless terminal output if the length differs, since
    // `out` is hundreds of KBs in size.
    let out = out.into_bytes();
    assert_eq!(expected.len(), out.len());
    assert_eq!(expected, out);
}
/// Filters reduce the counted length but do not disable abbreviation:
/// if the filtered length still exceeds the limit, the middle is skipped.
#[test]
fn test_abbreviate_filters_can_still_cause_abbreviations() {
    let mut out = ProcOutput::new();
    let filters = &[std::iter::repeat('a').take(64).collect::<String>()];
    // HEAD_LEN + TAIL_LEN of dots plus a 64-byte filter match at the end:
    // still 64 bytes over the limit even after placeholder accounting.
    let mut input = vec![b'.'; HEAD_LEN];
    input.extend_from_slice(&vec![b'.'; TAIL_LEN]);
    input.extend_from_slice(filters[0].as_bytes());
    let mut expected = vec![b'.'; HEAD_LEN];
    expected.extend_from_slice(b"\n\n<<<<<< SKIPPED 64 BYTES >>>>>>\n\n");
    expected.extend_from_slice(&vec![b'.'; TAIL_LEN - 64]);
    expected.extend_from_slice(&vec![b'a'; 64]);
    out.extend(&input, filters);
    // We first check the length to avoid endless terminal output if the length differs, since
    // `out` is hundreds of KBs in size.
    let out = out.into_bytes();
    assert_eq!(expected.len(), out.len());
    assert_eq!(expected, out);
}
| 33.733871 | 94 | 0.633517 |
eb204cb571333ff4b79b709c49d5da8420454e5b | 4,323 | use serde::Serialize;
use super::*;
use crate::documents::BuildXML;
use crate::types::{AlignmentType, SpecialIndentType};
use crate::xml_builder::*;
#[derive(Serialize, Debug, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
/// Direct paragraph formatting (OOXML `w:pPr`). Every field other than
/// `run_property` is optional; `None` means "not set, inherit from style".
pub struct ParagraphProperty {
    // Run-level defaults for the paragraph mark (`w:rPr`).
    pub run_property: RunProperty,
    // Named paragraph style reference (`w:pStyle`).
    pub style: Option<ParagraphStyle>,
    // List/numbering binding (`w:numPr`).
    pub numbering_property: Option<NumberingProperty>,
    // Justification (`w:jc`).
    pub alignment: Option<Justification>,
    // Indentation (`w:ind`).
    pub indent: Option<Indent>,
    // Line spacing value, serialized as `w:spacing` when set.
    pub line_height: Option<u32>,
}
impl Default for ParagraphProperty {
    /// A paragraph property with no optional formatting set and an empty
    /// run-property block.
    fn default() -> Self {
        Self {
            line_height: None,
            indent: None,
            alignment: None,
            numbering_property: None,
            style: None,
            run_property: RunProperty::new(),
        }
    }
}
// 17.3.1.26
// pPr (Paragraph Properties)
// This element specifies a set of paragraph properties which shall be applied to the contents of the parent
// paragraph after all style/numbering/table properties have been applied to the text. These properties are defined
// as direct formatting, since they are directly applied to the paragraph and supersede any formatting from styles.
impl ParagraphProperty {
    /// Creates an empty property set (same as `Default::default()`).
    pub fn new() -> ParagraphProperty {
        Default::default()
    }
    /// Sets paragraph justification (`w:jc`). Consuming builder style.
    pub fn align(mut self, alignment_type: AlignmentType) -> Self {
        self.alignment = Some(Justification::new(alignment_type.to_string()));
        self
    }
    /// Sets the referenced paragraph style id (`w:pStyle`).
    pub fn style(mut self, style_id: &str) -> Self {
        self.style = Some(ParagraphStyle::new(Some(style_id)));
        self
    }
    /// Sets indentation (`w:ind`). All arguments are optional; `None` leaves
    /// the corresponding attribute unset.
    pub fn indent(
        mut self,
        left: Option<i32>,
        special_indent: Option<SpecialIndentType>,
        end: Option<i32>,
        start_chars: Option<i32>,
    ) -> Self {
        self.indent = Some(Indent::new(left, special_indent, end, start_chars));
        self
    }
    /// Binds the paragraph to a numbering definition and indent level
    /// (`w:numPr`).
    pub fn numbering(mut self, id: NumberingId, level: IndentLevel) -> Self {
        self.numbering_property = Some(NumberingProperty::new().add_num(id, level));
        self
    }
    /// Sets the line-spacing value, emitted as `w:spacing` on build.
    pub fn line_height(mut self, h: u32) -> Self {
        self.line_height = Some(h);
        self
    }
}
impl BuildXML for ParagraphProperty {
    /// Serializes this property set to a `w:pPr` XML fragment.
    ///
    /// The run property is always emitted; every optional field is emitted
    /// only when set (`add_optional_child` skips `None`).
    fn build(&self) -> Vec<u8> {
        // Idiomatic Option::map instead of the manual
        // `if let Some(..) { Some(..) } else { None }` dance.
        let spacing = self
            .line_height
            .map(|s| Spacing::new(crate::SpacingType::Line(s)));
        XMLBuilder::new()
            .open_paragraph_property()
            .add_child(&self.run_property)
            .add_optional_child(&self.style)
            .add_optional_child(&self.numbering_property)
            .add_optional_child(&self.alignment)
            .add_optional_child(&self.indent)
            .add_optional_child(&spacing)
            .close()
            .build()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[cfg(test)]
    use pretty_assertions::assert_eq;
    use std::str;
    // An empty property set serializes to a pPr containing only the
    // (always-emitted) empty rPr.
    #[test]
    fn test_default() {
        let c = ParagraphProperty::new();
        let b = c.build();
        assert_eq!(str::from_utf8(&b).unwrap(), r#"<w:pPr><w:rPr /></w:pPr>"#);
    }
    // align() adds a w:jc child with the lowercase alignment value.
    #[test]
    fn test_alignment() {
        let c = ParagraphProperty::new();
        let b = c.align(AlignmentType::Right).build();
        assert_eq!(
            str::from_utf8(&b).unwrap(),
            r#"<w:pPr><w:rPr /><w:jc w:val="right" /></w:pPr>"#
        );
    }
    // indent() with only a left value still emits w:right="0".
    #[test]
    fn test_indent() {
        let c = ParagraphProperty::new();
        let b = c.indent(Some(20), None, None, None).build();
        assert_eq!(
            str::from_utf8(&b).unwrap(),
            r#"<w:pPr><w:rPr /><w:ind w:left="20" w:right="0" /></w:pPr>"#
        );
    }
    // Serde serialization uses camelCase keys and nests specialIndent.
    #[test]
    fn test_indent_json() {
        let c = ParagraphProperty::new();
        let b = c.indent(Some(20), Some(SpecialIndentType::FirstLine(10)), None, None);
        assert_eq!(
            serde_json::to_string(&b).unwrap(),
            r#"{"runProperty":{"sz":null,"szCs":null,"color":null,"highlight":null,"underline":null,"bold":null,"boldCs":null,"italic":null,"italicCs":null,"vanish":null,"spacing":null,"fonts":null},"style":null,"numberingProperty":null,"alignment":null,"indent":{"start":20,"startChars":null,"end":null,"specialIndent":{"type":"firstLine","val":10}},"lineHeight":null}"#
        );
    }
}
| 31.100719 | 371 | 0.595882 |
8f73836c41ead15d430c2fb1d5924c37ea8c5b2a | 9,763 | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! This module defines
//! 1) a list of constants for every keyword that
//! can appear in [Word::keyword]:
//! pub const KEYWORD = "KEYWORD"
//! 2) an `ALL_KEYWORDS` array with every keyword in it
//! This is not a list of *reserved* keywords: some of these can be
//! parsed as identifiers if the parser decides so. This means that
//! new keywords can be added here without affecting the parse result.
//!
//! As a matter of fact, most of these keywords are not used at all
//! and could be removed.
//! 3) a `RESERVED_FOR_TABLE_ALIAS` array with keywords reserved in a
//! "table alias" context.
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// Defines a string constant for a single keyword: `kw_def!(SELECT);`
/// expands to `pub const SELECT = "SELECT";`
macro_rules! kw_def {
    // Explicit spelling: `kw_def!(END_EXEC = "END-EXEC")` for keywords whose
    // SQL text differs from the Rust identifier.
    ($ident:ident = $string_keyword:expr) => {
        pub const $ident: &'static str = $string_keyword;
    };
    // Common case: the keyword's text is the identifier itself.
    ($ident:ident) => {
        kw_def!($ident = stringify!($ident));
    };
}
/// Expands to a list of `kw_def!()` invocations for each keyword
/// and defines an ALL_KEYWORDS array of the defined constants.
macro_rules! define_keywords {
    ($(
        $ident:ident $(= $string_keyword:expr)?
    ),*) => {
        // One enum variant per keyword, plus NoKeyword for non-keyword words.
        #[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
        #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
        #[allow(non_camel_case_types)]
        pub enum Keyword {
            NoKeyword,
            $($ident),*
        }
        // Parallel arrays: ALL_KEYWORDS_INDEX[i] is the enum variant whose
        // text is ALL_KEYWORDS[i]; both follow the declaration order, which
        // must stay sorted so lookups can binary-search ALL_KEYWORDS.
        pub const ALL_KEYWORDS_INDEX: &[Keyword] = &[
            $(Keyword::$ident),*
        ];
        $(kw_def!($ident $(= $string_keyword)?);)*
        pub const ALL_KEYWORDS: &[&str] = &[
            $($ident),*
        ];
    };
}
// The following keywords should be sorted to be able to match using binary search
define_keywords!(
ABORT,
ABS,
ACTION,
ADD,
ALL,
ALLOCATE,
ALTER,
ANALYZE,
AND,
ANY,
APPLY,
ARE,
ARRAY,
ARRAY_AGG,
ARRAY_MAX_CARDINALITY,
AS,
ASC,
ASENSITIVE,
ASSERT,
ASYMMETRIC,
AT,
ATOMIC,
AUTHORIZATION,
AUTOINCREMENT,
AUTO_INCREMENT,
AVG,
AVRO,
BEGIN,
BEGIN_FRAME,
BEGIN_PARTITION,
BETWEEN,
BIGINT,
BINARY,
BLOB,
BOOLEAN,
BOTH,
BY,
BYTEA,
CACHE,
CALL,
CALLED,
CARDINALITY,
CASCADE,
CASCADED,
CASE,
CAST,
CEIL,
CEILING,
CHAIN,
CHAR,
CHARACTER,
CHARACTER_LENGTH,
CHAR_LENGTH,
CHECK,
CLOB,
CLOSE,
CLUSTER,
COALESCE,
COLLATE,
COLLECT,
COLUMN,
COLUMNS,
COMMIT,
COMMITTED,
COMPUTE,
CONDITION,
CONNECT,
CONSTRAINT,
CONTAINS,
CONVERT,
COPY,
CORR,
CORRESPONDING,
COUNT,
COVAR_POP,
COVAR_SAMP,
CREATE,
CROSS,
CSV,
CUBE,
CUME_DIST,
CURRENT,
CURRENT_CATALOG,
CURRENT_DATE,
CURRENT_DEFAULT_TRANSFORM_GROUP,
CURRENT_PATH,
CURRENT_ROLE,
CURRENT_ROW,
CURRENT_SCHEMA,
CURRENT_TIME,
CURRENT_TIMESTAMP,
CURRENT_TRANSFORM_GROUP_FOR_TYPE,
CURRENT_USER,
CURSOR,
CYCLE,
DATABASE,
DATE,
DAY,
DEALLOCATE,
DEC,
DECIMAL,
DECLARE,
DEFAULT,
DELETE,
DELIMITED,
DENSE_RANK,
DEREF,
DESC,
DESCRIBE,
DETERMINISTIC,
DIRECTORY,
DISCONNECT,
DISTINCT,
DISTRIBUTE,
DOUBLE,
DROP,
DYNAMIC,
EACH,
ELEMENT,
ELSE,
END,
END_EXEC = "END-EXEC",
END_FRAME,
END_PARTITION,
EQUALS,
ERROR,
ESCAPE,
EVENT,
EVERY,
EXCEPT,
EXEC,
EXECUTE,
EXISTS,
EXP,
EXPLAIN,
EXTENDED,
EXTERNAL,
EXTRACT,
FAIL,
FALSE,
FETCH,
FIELDS,
FILTER,
FIRST,
FIRST_VALUE,
FLOAT,
FLOOR,
FOLLOWING,
FOR,
FOREIGN,
FORMAT,
FRAME_ROW,
FREE,
FROM,
FULL,
FUNCTION,
FUSION,
GET,
GLOBAL,
GRANT,
GROUP,
GROUPING,
GROUPS,
HAVING,
HEADER,
HIVEVAR,
HOLD,
HOUR,
IDENTITY,
IF,
IGNORE,
ILIKE,
IN,
INDEX,
INDICATOR,
INNER,
INOUT,
INPUTFORMAT,
INSENSITIVE,
INSERT,
INT,
INTEGER,
INTERSECT,
INTERSECTION,
INTERVAL,
INTO,
IS,
ISOLATION,
JOIN,
JSONFILE,
KEY,
LAG,
LANGUAGE,
LARGE,
LAST,
LAST_VALUE,
LATERAL,
LEAD,
LEADING,
LEFT,
LEVEL,
LIKE,
LIKE_REGEX,
LIMIT,
LISTAGG,
LN,
LOCAL,
LOCALTIME,
LOCALTIMESTAMP,
LOCATION,
LOWER,
MANAGEDLOCATION,
MATCH,
MATERIALIZED,
MAX,
MEMBER,
MERGE,
METADATA,
METHOD,
MIN,
MINUTE,
MOD,
MODIFIES,
MODULE,
MONTH,
MSCK,
MULTISET,
NATIONAL,
NATURAL,
NCHAR,
NCLOB,
NEW,
NEXT,
NO,
NONE,
NORMALIZE,
NOSCAN,
NOT,
NTH_VALUE,
NTILE,
NULL,
NULLIF,
NULLS,
NUMERIC,
OBJECT,
OCCURRENCES_REGEX,
OCTET_LENGTH,
OF,
OFFSET,
OLD,
ON,
ONLY,
OPEN,
OR,
ORC,
ORDER,
OUT,
OUTER,
OUTPUTFORMAT,
OVER,
OVERFLOW,
OVERLAPS,
OVERLAY,
OVERWRITE,
PARAMETER,
PARQUET,
PARTITION,
PARTITIONED,
PARTITIONS,
PERCENT,
PERCENTILE_CONT,
PERCENTILE_DISC,
PERCENT_RANK,
PERIOD,
PORTION,
POSITION,
POSITION_REGEX,
POWER,
PRECEDES,
PRECEDING,
PRECISION,
PREPARE,
PRIMARY,
PROCEDURE,
PURGE,
RANGE,
RANK,
RCFILE,
READ,
READS,
REAL,
RECURSIVE,
REF,
REFERENCES,
REFERENCING,
REGCLASS,
REGR_AVGX,
REGR_AVGY,
REGR_COUNT,
REGR_INTERCEPT,
REGR_R2,
REGR_SLOPE,
REGR_SXX,
REGR_SXY,
REGR_SYY,
RELEASE,
RENAME,
REPAIR,
REPEATABLE,
REPLACE,
RESTRICT,
RESULT,
RETURN,
RETURNS,
REVOKE,
RIGHT,
ROLLBACK,
ROLLUP,
ROW,
ROWID,
ROWS,
ROW_NUMBER,
SAVEPOINT,
SCHEMA,
SCOPE,
SCROLL,
SEARCH,
SECOND,
SELECT,
SENSITIVE,
SEQUENCEFILE,
SERDE,
SERIALIZABLE,
SESSION,
SESSION_USER,
SET,
SHOW,
SIMILAR,
SMALLINT,
SOME,
SORT,
SPECIFIC,
SPECIFICTYPE,
SQL,
SQLEXCEPTION,
SQLSTATE,
SQLWARNING,
SQRT,
START,
STATIC,
STATISTICS,
STDDEV_POP,
STDDEV_SAMP,
STDIN,
STORED,
STRING,
SUBMULTISET,
SUBSTRING,
SUBSTRING_REGEX,
SUCCEEDS,
SUM,
SYMMETRIC,
SYNC,
SYSTEM,
SYSTEM_TIME,
SYSTEM_USER,
TABLE,
TABLESAMPLE,
TBLPROPERTIES,
TEMP,
TEMPORARY,
TEXT,
TEXTFILE,
THEN,
TIES,
TIME,
TIMESTAMP,
TIMEZONE_HOUR,
TIMEZONE_MINUTE,
TINYINT,
TO,
TOP,
TRAILING,
TRANSACTION,
TRANSLATE,
TRANSLATE_REGEX,
TRANSLATION,
TREAT,
TRIGGER,
TRIM,
TRIM_ARRAY,
TRUE,
TRUNCATE,
TRY_CAST,
UESCAPE,
UNBOUNDED,
UNCOMMITTED,
UNION,
UNIQUE,
UNKNOWN,
UNNEST,
UPDATE,
UPPER,
USER,
USING,
UUID,
VALUE,
VALUES,
VALUE_OF,
VARBINARY,
VARCHAR,
VARYING,
VAR_POP,
VAR_SAMP,
VERBOSE,
VERSIONING,
VIEW,
VIRTUAL,
WHEN,
WHENEVER,
WHERE,
WIDTH_BUCKET,
WINDOW,
WITH,
WITHIN,
WITHOUT,
WORK,
WRITE,
XOR,
YEAR,
ZONE
);
/// These keywords can't be used as a table alias, so that `FROM table_name alias`
/// can be parsed unambiguously without looking ahead.
// Unlike ALL_KEYWORDS, this list is looked up by membership, not binary
// search, so ordering is grouping-only.
pub const RESERVED_FOR_TABLE_ALIAS: &[Keyword] = &[
    // Reserved as both a table and a column alias:
    Keyword::WITH,
    Keyword::EXPLAIN,
    Keyword::ANALYZE,
    Keyword::SELECT,
    Keyword::WHERE,
    Keyword::GROUP,
    Keyword::SORT,
    Keyword::HAVING,
    Keyword::ORDER,
    Keyword::TOP,
    Keyword::LATERAL,
    Keyword::VIEW,
    Keyword::LIMIT,
    Keyword::OFFSET,
    Keyword::FETCH,
    Keyword::UNION,
    Keyword::EXCEPT,
    Keyword::INTERSECT,
    // Reserved only as a table alias in the `FROM`/`JOIN` clauses:
    Keyword::ON,
    Keyword::JOIN,
    Keyword::INNER,
    Keyword::CROSS,
    Keyword::FULL,
    Keyword::LEFT,
    Keyword::RIGHT,
    Keyword::NATURAL,
    Keyword::USING,
    Keyword::CLUSTER,
    Keyword::DISTRIBUTE,
    // for MSSQL-specific OUTER APPLY (seems reserved in most dialects)
    Keyword::OUTER,
];
/// Can't be used as a column alias, so that `SELECT <expr> alias`
/// can be parsed unambiguously without looking ahead.
// Kept in sync with RESERVED_FOR_TABLE_ALIAS for the shared first group.
pub const RESERVED_FOR_COLUMN_ALIAS: &[Keyword] = &[
    // Reserved as both a table and a column alias:
    Keyword::WITH,
    Keyword::EXPLAIN,
    Keyword::ANALYZE,
    Keyword::SELECT,
    Keyword::WHERE,
    Keyword::GROUP,
    Keyword::SORT,
    Keyword::HAVING,
    Keyword::ORDER,
    Keyword::TOP,
    Keyword::LATERAL,
    Keyword::VIEW,
    Keyword::LIMIT,
    Keyword::OFFSET,
    Keyword::FETCH,
    Keyword::UNION,
    Keyword::EXCEPT,
    Keyword::INTERSECT,
    Keyword::CLUSTER,
    Keyword::DISTRIBUTE,
    // Reserved only as a column alias in the `SELECT` clause
    Keyword::FROM,
];
| 17.249117 | 82 | 0.584861 |
9c6cc05abcb75a092f08a721d76fe9fef9e3b777 | 8,114 | use async_trait::async_trait;
use futures::channel::oneshot;
use nix::errno::Errno;
use parking_lot::Mutex;
use std::{convert::From, sync::Arc};
use uuid::Uuid;
use crate::{
bdev::nvmx::{
controller_inner::SpdkNvmeController,
NvmeController,
NvmeControllerState,
NvmeDeviceHandle,
NvmeNamespace,
NVME_CONTROLLERS,
},
core::{
BlockDevice,
BlockDeviceDescriptor,
BlockDeviceHandle,
BlockDeviceIoStats,
CoreError,
DeviceEventType,
DeviceIoController,
DeviceTimeoutAction,
IoType,
},
ffihelper::{cb_arg, done_cb},
};
/// Block device view over an NVMe controller's namespace.
pub struct NvmeBlockDevice {
    // The namespace this device exposes.
    ns: Arc<NvmeNamespace>,
    // Controller name, used for lookups in the NVME_CONTROLLERS registry.
    name: String,
}
/// Descriptor for an opened NVMe device that represents a namespace for
/// an NVMe controller.
pub struct NvmeDeviceDescriptor {
    // Namespace backing the device.
    ns: Arc<NvmeNamespace>,
    // Raw SPDK controller handle used to create I/O handles.
    ctrlr: SpdkNvmeController,
    // Controller id, used as the SPDK io-device identifier.
    io_device_id: u64,
    // Controller name (registry key).
    name: String,
    // Protection-check flags propagated to each handle.
    prchk_flags: u32,
}
impl NvmeDeviceDescriptor {
    /// Builds a descriptor from a live controller.
    ///
    /// Fails with ENODEV when the controller currently exposes no namespace.
    /// The `controller()` unwrap is only reached after `namespace()` returned
    /// Some, i.e. the caller is expected to pass a running controller.
    fn create(
        controller: &NvmeController,
    ) -> Result<Box<dyn BlockDeviceDescriptor>, CoreError> {
        if let Some(ns) = controller.namespace() {
            Ok(Box::new(NvmeDeviceDescriptor {
                ns,
                io_device_id: controller.id(),
                name: controller.get_name(),
                ctrlr: controller.controller().unwrap(),
                prchk_flags: controller.flags(),
            }))
        } else {
            Err(CoreError::OpenBdev {
                source: Errno::ENODEV,
            })
        }
    }
}
impl BlockDeviceDescriptor for NvmeDeviceDescriptor {
    /// Returns a fresh block-device view sharing this descriptor's namespace.
    fn get_device(&self) -> Box<dyn BlockDevice> {
        Box::new(NvmeBlockDevice::from_ns(&self.name, self.ns.clone()))
    }
    /// Consumes the descriptor and turns it into an I/O handle.
    fn into_handle(
        self: Box<Self>,
    ) -> Result<Box<dyn BlockDeviceHandle>, CoreError> {
        Ok(Box::new(NvmeDeviceHandle::create(
            &self.name,
            self.io_device_id,
            self.ctrlr,
            self.ns,
            self.prchk_flags,
        )?))
    }
    /// Creates an I/O handle without consuming the descriptor
    /// (namespace Arc is cloned).
    fn get_io_handle(&self) -> Result<Box<dyn BlockDeviceHandle>, CoreError> {
        Ok(Box::new(NvmeDeviceHandle::create(
            &self.name,
            self.io_device_id,
            self.ctrlr,
            self.ns.clone(),
            self.prchk_flags,
        )?))
    }
    // Intentionally a no-op for now; only logs.
    fn unclaim(&self) {
        warn!("unclaim() is not implemented for NvmeDeviceDescriptor yet");
    }
}
impl NvmeBlockDevice {
    /// Opens a descriptor for the named controller.
    ///
    /// Returns `BdevNotFound` both when the name is unknown and when the
    /// controller exists but is not in the Running state.
    pub fn open_by_name(
        name: &str,
        read_write: bool,
    ) -> Result<Box<dyn BlockDeviceDescriptor>, CoreError> {
        // TODO: Handle read_write flag properly.
        if !read_write {
            warn!("read-only mode is not supported in NvmeBlockDevice::open_by_name()");
        }
        let controller = NVME_CONTROLLERS.lookup_by_name(name).ok_or(
            CoreError::BdevNotFound {
                name: name.to_string(),
            },
        )?;
        let controller = controller.lock();
        // Make sure controller is available.
        if controller.get_state() == NvmeControllerState::Running {
            let descr = NvmeDeviceDescriptor::create(&controller)?;
            Ok(descr)
        } else {
            Err(CoreError::BdevNotFound {
                name: name.to_string(),
            })
        }
    }
    /// Wraps an already-resolved namespace as a block device.
    pub fn from_ns(name: &str, ns: Arc<NvmeNamespace>) -> NvmeBlockDevice {
        NvmeBlockDevice {
            ns,
            name: String::from(name),
        }
    }
}
#[async_trait(?Send)]
impl BlockDevice for NvmeBlockDevice {
    // Geometry accessors simply delegate to the namespace.
    fn size_in_bytes(&self) -> u64 {
        self.ns.size_in_bytes()
    }
    fn block_len(&self) -> u64 {
        self.ns.block_len()
    }
    fn num_blocks(&self) -> u64 {
        self.ns.num_blocks()
    }
    fn uuid(&self) -> Uuid {
        self.ns.uuid()
    }
    fn product_name(&self) -> String {
        "NVMe disk".to_string()
    }
    fn driver_name(&self) -> String {
        String::from("nvme")
    }
    fn device_name(&self) -> String {
        self.name.clone()
    }
    fn alignment(&self) -> u64 {
        self.ns.alignment()
    }
    /// Mirrors SPDK's bdev_nvme_io_type_supported: basic I/O and admin ops
    /// are always supported; Compare and NvmeIoMd depend on the namespace;
    /// Unmap / WriteZeros / CompareAndWrite are explicitly unsupported here.
    fn io_type_supported(&self, io_type: IoType) -> bool {
        // bdev_nvme_io_type_supported
        match io_type {
            IoType::Read
            | IoType::Write
            | IoType::Reset
            | IoType::Flush
            | IoType::NvmeAdmin
            | IoType::NvmeIo
            | IoType::Abort => true,
            IoType::Compare => self.ns.supports_compare(),
            IoType::NvmeIoMd => self.ns.md_size() > 0,
            IoType::Unmap => false,
            IoType::WriteZeros => false,
            IoType::CompareAndWrite => false,
            _ => false,
        }
    }
    /// Collects I/O statistics asynchronously: the controller invokes the
    /// callback with the stats, which are forwarded through a oneshot channel.
    async fn io_stats(&self) -> Result<BlockDeviceIoStats, CoreError> {
        let carc = NVME_CONTROLLERS.lookup_by_name(&self.name).ok_or(
            CoreError::BdevNotFound {
                name: self.name.to_string(),
            },
        )?;
        let (s, r) =
            oneshot::channel::<Result<BlockDeviceIoStats, CoreError>>();
        // Schedule async I/O stats collection and wait for the result.
        // Scoped block so the controller lock is dropped before awaiting.
        {
            let controller = carc.lock();
            controller.get_io_stats(
                |stats, ch| {
                    done_cb(ch, stats);
                },
                cb_arg(s),
            )?;
        }
        r.await.expect("Failed awaiting at io_stats")
    }
    // NVMe devices are never claimed by another module here.
    fn claimed_by(&self) -> Option<String> {
        None
    }
    fn open(
        &self,
        read_write: bool,
    ) -> Result<Box<dyn BlockDeviceDescriptor>, CoreError> {
        NvmeBlockDevice::open_by_name(&self.name, read_write)
    }
    fn get_io_controller(&self) -> Option<Box<dyn DeviceIoController>> {
        Some(Box::new(NvmeDeviceIoController::new(self.name.to_string())))
    }
    /// Registers a device-event callback on the underlying controller.
    fn add_event_listener(
        &self,
        listener: fn(DeviceEventType, &str),
    ) -> Result<(), CoreError> {
        let controller = NVME_CONTROLLERS.lookup_by_name(&self.name).ok_or(
            CoreError::BdevNotFound {
                name: self.name.clone(),
            },
        )?;
        let controller = controller.lock();
        controller.add_event_listener(listener)
    }
}
/// Per-device I/O controller that resolves its NVMe controller by name on
/// every operation (so it never holds a stale reference).
struct NvmeDeviceIoController {
    // Controller name used for registry lookups.
    name: String,
}
impl NvmeDeviceIoController {
    pub fn new(name: String) -> Self {
        Self {
            name,
        }
    }
    /// Resolves the named controller from the global registry, or
    /// `BdevNotFound` if it has since disappeared.
    fn lookup_controller(
        &self,
    ) -> Result<Arc<Mutex<NvmeController<'static>>>, CoreError> {
        let controller = NVME_CONTROLLERS.lookup_by_name(&self.name).ok_or(
            CoreError::BdevNotFound {
                name: self.name.to_string(),
            },
        )?;
        Ok(controller)
    }
}
impl DeviceIoController for NvmeDeviceIoController {
    /// Reads the controller's current I/O timeout action.
    fn get_timeout_action(&self) -> Result<DeviceTimeoutAction, CoreError> {
        let controller = self.lookup_controller()?;
        let controller = controller.lock();
        controller.get_timeout_action()
    }
    /// Updates the controller's I/O timeout action.
    fn set_timeout_action(
        &mut self,
        action: DeviceTimeoutAction,
    ) -> Result<(), CoreError> {
        let controller = self.lookup_controller()?;
        let mut controller = controller.lock();
        controller.set_timeout_action(action)
    }
}
/*
* Lookup target NVMeOF device by its name (starts with nvmf://).
*/
/// Looks up an NVMeOF block device by controller name.
///
/// Returns `None` both when the controller is unknown and when it exists
/// but is not Running; a Running controller is expected to always expose a
/// namespace (the `expect` documents that invariant).
pub fn lookup_by_name(name: &str) -> Option<Box<dyn BlockDevice>> {
    if let Some(c) = NVME_CONTROLLERS.lookup_by_name(name) {
        let controller = c.lock();
        // Make sure controller is available.
        if controller.get_state() == NvmeControllerState::Running {
            let ns = controller
                .namespace()
                .expect("no namespaces for this controller");
            return Some(Box::new(NvmeBlockDevice::from_ns(name, ns)));
        }
    }
    debug!("{}: NVMe controller not found", name);
    None
}
/// Free-function convenience wrapper around
/// `NvmeBlockDevice::open_by_name`.
pub fn open_by_name(
    name: &str,
    read_write: bool,
) -> Result<Box<dyn BlockDeviceDescriptor>, CoreError> {
    NvmeBlockDevice::open_by_name(name, read_write)
}
| 26.429967 | 88 | 0.56421 |
116ad072837928a7cf573aa1e790528059b4e942 | 17,219 | use clippy_utils::diagnostics::span_lint;
use clippy_utils::{in_macro, trait_ref_of_method};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::intravisit::{
walk_fn_decl, walk_generic_param, walk_generics, walk_item, walk_param_bound, walk_poly_trait_ref, walk_ty,
NestedVisitorMap, Visitor,
};
use rustc_hir::FnRetTy::Return;
use rustc_hir::{
BareFnTy, BodyId, FnDecl, GenericArg, GenericBound, GenericParam, GenericParamKind, Generics, ImplItem,
ImplItemKind, Item, ItemKind, LangItem, Lifetime, LifetimeName, ParamName, PolyTraitRef, TraitBoundModifier,
TraitFn, TraitItem, TraitItemKind, Ty, TyKind, WhereClause, WherePredicate,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::map::Map;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::symbol::{kw, Symbol};
declare_clippy_lint! {
/// **What it does:** Checks for lifetime annotations which can be removed by
/// relying on lifetime elision.
///
/// **Why is this bad?** The additional lifetimes make the code look more
/// complicated, while there is nothing out of the ordinary going on. Removing
/// them leads to more readable code.
///
/// **Known problems:**
/// - We bail out if the function has a `where` clause where lifetimes
/// are mentioned due to potenial false positives.
/// - Lifetime bounds such as `impl Foo + 'a` and `T: 'a` must be elided with the
/// placeholder notation `'_` because the fully elided notation leaves the type bound to `'static`.
///
/// **Example:**
/// ```rust
/// // Bad: unnecessary lifetime annotations
/// fn in_and_out<'a>(x: &'a u8, y: u8) -> &'a u8 {
/// x
/// }
///
/// // Good
/// fn elided(x: &u8, y: u8) -> &u8 {
/// x
/// }
/// ```
pub NEEDLESS_LIFETIMES,
complexity,
"using explicit lifetimes for references in function arguments when elision rules \
would allow omitting them"
}
declare_clippy_lint! {
/// **What it does:** Checks for lifetimes in generics that are never used
/// anywhere else.
///
/// **Why is this bad?** The additional lifetimes make the code look more
/// complicated, while there is nothing out of the ordinary going on. Removing
/// them leads to more readable code.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// // Bad: unnecessary lifetimes
/// fn unused_lifetime<'a>(x: u8) {
/// // ..
/// }
///
/// // Good
/// fn no_lifetime(x: u8) {
/// // ...
/// }
/// ```
pub EXTRA_UNUSED_LIFETIMES,
complexity,
"unused lifetimes in function definitions"
}
declare_lint_pass!(Lifetimes => [NEEDLESS_LIFETIMES, EXTRA_UNUSED_LIFETIMES]);
impl<'tcx> LateLintPass<'tcx> for Lifetimes {
    // Free functions: always check, body is always present.
    fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
        if let ItemKind::Fn(ref sig, ref generics, id) = item.kind {
            check_fn_inner(cx, sig.decl, Some(id), generics, item.span, true);
        }
    }
    // Impl methods: skip the extra-lifetimes report for trait impls, since
    // the signature (and its lifetimes) is dictated by the trait.
    fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
        if let ImplItemKind::Fn(ref sig, id) = item.kind {
            let report_extra_lifetimes = trait_ref_of_method(cx, item.hir_id()).is_none();
            check_fn_inner(
                cx,
                sig.decl,
                Some(id),
                &item.generics,
                item.span,
                report_extra_lifetimes,
            );
        }
    }
    // Trait methods: required methods have no body, provided ones do.
    fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
        if let TraitItemKind::Fn(ref sig, ref body) = item.kind {
            let body = match *body {
                TraitFn::Required(_) => None,
                TraitFn::Provided(id) => Some(id),
            };
            check_fn_inner(cx, sig.decl, body, &item.generics, item.span, true);
        }
    }
}
/// The lifetime of a &-reference.
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
enum RefLt {
    // Elided or anonymous lifetime (`&T`, `'_`).
    Unnamed,
    // The `'static` lifetime.
    Static,
    // An explicitly named lifetime such as `'a`.
    Named(Symbol),
}
/// Core check shared by items, impl methods and trait methods.
///
/// Bails out early (no lint) when the function comes from a macro expansion,
/// when its `where` clause mentions lifetimes, or when a type-parameter bound
/// references a named (non-`'static`, non-elided) lifetime — all cases where
/// suggesting elision could be wrong. Otherwise fires NEEDLESS_LIFETIMES if
/// the signature could use elision, and optionally reports unused lifetimes.
fn check_fn_inner<'tcx>(
    cx: &LateContext<'tcx>,
    decl: &'tcx FnDecl<'_>,
    body: Option<BodyId>,
    generics: &'tcx Generics<'_>,
    span: Span,
    report_extra_lifetimes: bool,
) {
    if in_macro(span) || has_where_lifetimes(cx, &generics.where_clause) {
        return;
    }
    // Only the type parameters; lifetime/const params are irrelevant here.
    let types = generics
        .params
        .iter()
        .filter(|param| matches!(param.kind, GenericParamKind::Type { .. }));
    for typ in types {
        for bound in typ.bounds {
            // Any named lifetime inside a bound's type makes elision unsafe.
            let mut visitor = RefVisitor::new(cx);
            walk_param_bound(&mut visitor, bound);
            if visitor.lts.iter().any(|lt| matches!(lt, RefLt::Named(_))) {
                return;
            }
            if let GenericBound::Trait(ref trait_ref, _) = *bound {
                // Inspect lifetime arguments of the bound's final path
                // segment, e.g. the `'a` in `T: Trait<'a>`.
                let params = &trait_ref
                    .trait_ref
                    .path
                    .segments
                    .last()
                    .expect("a path must have at least one segment")
                    .args;
                if let Some(params) = *params {
                    let lifetimes = params.args.iter().filter_map(|arg| match arg {
                        GenericArg::Lifetime(lt) => Some(lt),
                        _ => None,
                    });
                    for bound in lifetimes {
                        if bound.name != LifetimeName::Static && !bound.is_elided() {
                            return;
                        }
                    }
                }
            }
        }
    }
    if could_use_elision(cx, decl, body, generics.params) {
        span_lint(
            cx,
            NEEDLESS_LIFETIMES,
            span.with_hi(decl.output.span().hi()),
            "explicit lifetimes given in parameter types where they could be elided \
            (or replaced with `'_` if needed by type declaration)",
        );
    }
    if report_extra_lifetimes {
        self::report_extra_lifetimes(cx, decl, generics);
    }
}
/// Returns `true` when every explicit lifetime in `func`'s signature could be
/// removed under the lifetime elision rules.
fn could_use_elision<'tcx>(
    cx: &LateContext<'tcx>,
    func: &'tcx FnDecl<'_>,
    body: Option<BodyId>,
    named_generics: &'tcx [GenericParam<'_>],
) -> bool {
    // There are two scenarios where elision works:
    // * no output references, all input references have different LT
    // * output references, exactly one input reference with same LT
    // All lifetimes must be unnamed, 'static or defined without bounds on the
    // level of the current item.
    // check named LTs
    let allowed_lts = allowed_lts_from(named_generics);
    // these will collect all the lifetimes for references in arg/return types
    let mut input_visitor = RefVisitor::new(cx);
    let mut output_visitor = RefVisitor::new(cx);
    // extract lifetimes in input argument types
    for arg in func.inputs {
        input_visitor.visit_ty(arg);
    }
    // extract lifetimes in output type
    if let Return(ty) = func.output {
        output_visitor.visit_ty(ty);
    }
    for lt in named_generics {
        input_visitor.visit_generic_param(lt)
    }
    // An explicit trait-object lifetime anywhere aborts the analysis.
    if input_visitor.abort() || output_visitor.abort() {
        return false;
    }
    // A named lifetime that also occurs in a nested elision site (closure
    // bound or bare fn type) must stay explicit.
    if allowed_lts
        .intersection(
            &input_visitor
                .nested_elision_site_lts
                .iter()
                .chain(output_visitor.nested_elision_site_lts.iter())
                .cloned()
                .filter(|v| matches!(v, RefLt::Named(_)))
                .collect(),
        )
        .next()
        .is_some()
    {
        return false;
    }
    let input_lts = input_visitor.lts;
    let output_lts = output_visitor.lts;
    // A named lifetime used inside the function body prevents elision of the
    // signature.
    if let Some(body_id) = body {
        let mut checker = BodyLifetimeChecker {
            lifetimes_used_in_body: false,
        };
        checker.visit_expr(&cx.tcx.hir().body(body_id).value);
        if checker.lifetimes_used_in_body {
            return false;
        }
    }
    // check for lifetimes from higher scopes
    for lt in input_lts.iter().chain(output_lts.iter()) {
        if !allowed_lts.contains(lt) {
            return false;
        }
    }
    // no input lifetimes? easy case!
    if input_lts.is_empty() {
        false
    } else if output_lts.is_empty() {
        // no output lifetimes, check distinctness of input lifetimes
        // only unnamed and static, ok
        let unnamed_and_static = input_lts.iter().all(|lt| *lt == RefLt::Unnamed || *lt == RefLt::Static);
        if unnamed_and_static {
            return false;
        }
        // we have no output reference, so we only need all distinct lifetimes
        input_lts.len() == unique_lifetimes(&input_lts)
    } else {
        // we have output references, so we need one input reference,
        // and all output lifetimes must be the same
        if unique_lifetimes(&output_lts) > 1 {
            return false;
        }
        if input_lts.len() == 1 {
            match (&input_lts[0], &output_lts[0]) {
                (&RefLt::Named(n1), &RefLt::Named(n2)) if n1 == n2 => true,
                (&RefLt::Named(_), &RefLt::Unnamed) => true,
                _ => false, /* already elided, different named lifetimes
                             * or something static going on */
            }
        } else {
            false
        }
    }
}
/// Collects the set of lifetimes that are allowed to appear in an elidable
/// signature: every bound-less named lifetime declared in `named_generics`,
/// plus the always-acceptable unnamed and `'static` lifetimes.
fn allowed_lts_from(named_generics: &[GenericParam<'_>]) -> FxHashSet<RefLt> {
    let mut allowed: FxHashSet<RefLt> = named_generics
        .iter()
        .filter(|par| matches!(par.kind, GenericParamKind::Lifetime { .. }))
        .filter(|par| par.bounds.is_empty())
        .map(|par| RefLt::Named(par.name.ident().name))
        .collect();
    allowed.insert(RefLt::Unnamed);
    allowed.insert(RefLt::Static);
    allowed
}
/// Number of unique lifetimes in the given vector.
#[must_use]
fn unique_lifetimes(lts: &[RefLt]) -> usize {
    let distinct: FxHashSet<&RefLt> = lts.iter().collect();
    distinct.len()
}
// The closure traits; bounds on these are treated as nested elision sites.
const CLOSURE_TRAIT_BOUNDS: [LangItem; 3] = [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce];
/// A visitor usable for `rustc_front::visit::walk_ty()`.
struct RefVisitor<'a, 'tcx> {
    cx: &'a LateContext<'tcx>,
    /// Lifetimes of the references seen so far, in visit order.
    lts: Vec<RefLt>,
    /// Lifetimes collected inside nested elision sites (closure-trait bounds
    /// and bare fn types), kept separate from `lts`.
    nested_elision_site_lts: Vec<RefLt>,
    /// Set when a trait object carries an explicit (non-elided) lifetime;
    /// the analysis is aborted in that case.
    unelided_trait_object_lifetime: bool,
}
impl<'a, 'tcx> RefVisitor<'a, 'tcx> {
    /// Creates an empty visitor for the given lint context.
    fn new(cx: &'a LateContext<'tcx>) -> Self {
        Self {
            cx,
            lts: Vec::new(),
            nested_elision_site_lts: Vec::new(),
            unelided_trait_object_lifetime: false,
        }
    }
    /// Classifies `lifetime` and appends the result to `self.lts`.
    fn record(&mut self, lifetime: &Option<Lifetime>) {
        match *lifetime {
            Some(ref lt) if lt.name == LifetimeName::Static => self.lts.push(RefLt::Static),
            Some(ref lt) if matches!(lt.name, LifetimeName::Param(ParamName::Fresh(_))) => {
                // Fresh lifetimes generated should be ignored.
            },
            Some(ref lt) if lt.is_elided() => self.lts.push(RefLt::Unnamed),
            Some(ref lt) => self.lts.push(RefLt::Named(lt.name.ident().name)),
            None => self.lts.push(RefLt::Unnamed),
        }
    }
    /// Returns the directly collected lifetimes followed by those from
    /// nested elision sites.
    fn all_lts(&self) -> Vec<RefLt> {
        let mut all = self.lts.clone();
        all.extend(self.nested_elision_site_lts.iter().cloned());
        all
    }
    /// Whether the analysis must be abandoned for this type.
    fn abort(&self) -> bool {
        self.unelided_trait_object_lifetime
    }
}
impl<'a, 'tcx> Visitor<'tcx> for RefVisitor<'a, 'tcx> {
    type Map = Map<'tcx>;
    // for lifetimes as parameters of generics
    fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) {
        self.record(&Some(*lifetime));
    }
    fn visit_poly_trait_ref(&mut self, poly_tref: &'tcx PolyTraitRef<'tcx>, tbm: TraitBoundModifier) {
        let trait_ref = &poly_tref.trait_ref;
        // Bounds on the closure traits (Fn/FnMut/FnOnce) are elision sites of
        // their own: collect their lifetimes separately instead of walking
        // them into `lts`.
        if CLOSURE_TRAIT_BOUNDS.iter().any(|&item| {
            self.cx
                .tcx
                .lang_items()
                .require(item)
                .map_or(false, |id| Some(id) == trait_ref.trait_def_id())
        }) {
            let mut sub_visitor = RefVisitor::new(self.cx);
            sub_visitor.visit_trait_ref(trait_ref);
            self.nested_elision_site_lts.append(&mut sub_visitor.all_lts());
        } else {
            walk_poly_trait_ref(self, poly_tref, tbm);
        }
    }
    fn visit_ty(&mut self, ty: &'tcx Ty<'_>) {
        match ty.kind {
            // `impl Trait` return types: walk into the underlying item too.
            TyKind::OpaqueDef(item, _) => {
                let map = self.cx.tcx.hir();
                let item = map.item(item);
                walk_item(self, item);
                walk_ty(self, ty);
            },
            // A bare `fn(...)` type is its own elision site.
            TyKind::BareFn(&BareFnTy { decl, .. }) => {
                let mut sub_visitor = RefVisitor::new(self.cx);
                sub_visitor.visit_fn_decl(decl);
                self.nested_elision_site_lts.append(&mut sub_visitor.all_lts());
                return;
            },
            // An explicit lifetime on a trait object aborts the analysis.
            TyKind::TraitObject(bounds, ref lt, _) => {
                if !lt.is_elided() {
                    self.unelided_trait_object_lifetime = true;
                }
                for bound in bounds {
                    self.visit_poly_trait_ref(bound, TraitBoundModifier::None);
                }
                return;
            },
            _ => (),
        }
        walk_ty(self, ty);
    }
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }
}
/// Are any lifetimes mentioned in the `where` clause? If so, we don't try to
/// reason about elision.
fn has_where_lifetimes<'tcx>(cx: &LateContext<'tcx>, where_clause: &'tcx WhereClause<'_>) -> bool {
    for predicate in where_clause.predicates {
        match *predicate {
            // `'a: 'b` style predicates always mention lifetimes.
            WherePredicate::RegionPredicate(..) => return true,
            WherePredicate::BoundPredicate(ref pred) => {
                // a predicate like F: Trait or F: for<'a> Trait<'a>
                let mut visitor = RefVisitor::new(cx);
                // walk the type F, it may not contain LT refs
                walk_ty(&mut visitor, pred.bounded_ty);
                if !visitor.all_lts().is_empty() {
                    return true;
                }
                // if the bounds define new lifetimes, they are fine to occur
                let allowed_lts = allowed_lts_from(pred.bound_generic_params);
                // now walk the bounds
                for bound in pred.bounds.iter() {
                    walk_param_bound(&mut visitor, bound);
                }
                // and check that all lifetimes are allowed
                if visitor.all_lts().iter().any(|it| !allowed_lts.contains(it)) {
                    return true;
                }
            },
            // `T = U` predicates: any lifetime on either side counts.
            WherePredicate::EqPredicate(ref pred) => {
                let mut visitor = RefVisitor::new(cx);
                walk_ty(&mut visitor, pred.lhs_ty);
                walk_ty(&mut visitor, pred.rhs_ty);
                if !visitor.lts.is_empty() {
                    return true;
                }
            },
        }
    }
    false
}
/// Tracks declared lifetimes by name; every lifetime the visitor encounters
/// is removed from `map`, so whatever remains afterwards was never used.
struct LifetimeChecker {
    map: FxHashMap<Symbol, Span>,
}
impl<'tcx> Visitor<'tcx> for LifetimeChecker {
    type Map = Map<'tcx>;
    // for lifetimes as parameters of generics
    fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) {
        // Seeing a lifetime means it is used: drop it from the unused set.
        self.map.remove(&lifetime.name.ident().name);
    }
    fn visit_generic_param(&mut self, param: &'tcx GenericParam<'_>) {
        // don't actually visit `<'a>` or `<'a: 'b>`
        // we've already visited the `'a` declarations and
        // don't want to spuriously remove them
        // `'b` in `'a: 'b` is useless unless used elsewhere in
        // a non-lifetime bound
        if let GenericParamKind::Type { .. } = param.kind {
            walk_generic_param(self, param)
        }
    }
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }
}
fn report_extra_lifetimes<'tcx>(cx: &LateContext<'tcx>, func: &'tcx FnDecl<'_>, generics: &'tcx Generics<'_>) {
let hs = generics
.params
.iter()
.filter_map(|par| match par.kind {
GenericParamKind::Lifetime { .. } => Some((par.name.ident().name, par.span)),
_ => None,
})
.collect();
let mut checker = LifetimeChecker { map: hs };
walk_generics(&mut checker, generics);
walk_fn_decl(&mut checker, func);
for &v in checker.map.values() {
span_lint(
cx,
EXTRA_UNUSED_LIFETIMES,
v,
"this lifetime isn't used in the function definition",
);
}
}
/// Detects whether a function body mentions any named, non-'static lifetime.
struct BodyLifetimeChecker {
    lifetimes_used_in_body: bool,
}
impl<'tcx> Visitor<'tcx> for BodyLifetimeChecker {
    type Map = Map<'tcx>;
    // for lifetimes as parameters of generics
    fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) {
        // Only named, non-'static lifetimes count as "used in body".
        if lifetime.name.ident().name != kw::Empty && lifetime.name.ident().name != kw::StaticLifetime {
            self.lifetimes_used_in_body = true;
        }
    }
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }
}
| 33.370155 | 112 | 0.560311 |
de6bd189fbb914c4eadc0aaa9c32aef6f3dc0ca6 | 1,686 | #[cfg(not(lib_build))]
#[macro_use]
extern crate log;
use log::{Level, LevelFilter, Log, Metadata, Record};
use std::sync::{Arc, Mutex};
// With the `std` feature the crate-provided `set_boxed_logger` is used;
// without it, emulate it by leaking the box so `log::set_logger` can take a
// `&'static` reference.
#[cfg(feature = "std")]
use log::set_boxed_logger;
#[cfg(not(feature = "std"))]
fn set_boxed_logger(logger: Box<dyn Log>) -> Result<(), log::SetLoggerError> {
    log::set_logger(Box::leak(logger))
}
// Shared test state: remembers the level of the most recent log record.
struct State {
    last_log: Mutex<Option<Level>>,
}
// A logger that accepts everything and records the last level seen.
struct Logger(Arc<State>);
impl Log for Logger {
    fn enabled(&self, _: &Metadata) -> bool {
        true
    }
    fn log(&self, record: &Record) {
        *self.0.last_log.lock().unwrap() = Some(record.level());
    }
    fn flush(&self) {}
}
#[cfg_attr(lib_build, test)]
fn main() {
let me = Arc::new(State {
last_log: Mutex::new(None),
});
let a = me.clone();
set_boxed_logger(Box::new(Logger(me))).unwrap();
test(&a, LevelFilter::Off);
test(&a, LevelFilter::Error);
test(&a, LevelFilter::Warn);
test(&a, LevelFilter::Info);
test(&a, LevelFilter::Debug);
test(&a, LevelFilter::Trace);
}
fn test(a: &State, filter: LevelFilter) {
log::set_max_level(filter);
error!("");
last(&a, t(Level::Error, filter));
warn!("");
last(&a, t(Level::Warn, filter));
info!("");
last(&a, t(Level::Info, filter));
debug!("");
last(&a, t(Level::Debug, filter));
trace!("");
last(&a, t(Level::Trace, filter));
fn t(lvl: Level, filter: LevelFilter) -> Option<Level> {
if lvl <= filter {
Some(lvl)
} else {
None
}
}
}
/// Consumes and checks the most recently recorded level.
fn last(state: &State, expected: Option<Level>) {
    let actual = state.last_log.lock().unwrap().take();
    assert_eq!(actual, expected);
}
| 22.48 | 78 | 0.572954 |
e2e43e2d0bb068ea73dd89ea42d6e0262ab58410 | 24,606 | // here it's mostly crlf which is made less clear by using the ln forms
#![allow(clippy::print_with_newline)]
use crate::prelude::*;
use std::io::Write as _;
// Configuration for the `watch` subcommand; currently just the shared
// client settings.
#[derive(serde::Deserialize, Debug, Default)]
pub struct Config {
    #[serde(default)]
    client: crate::config::Client,
}
impl crate::config::Config for Config {
    // Overlay command-line arguments onto the client configuration.
    fn merge_args<'a>(
        &mut self,
        matches: &clap::ArgMatches<'a>,
    ) -> Result<()> {
        self.client.merge_args(matches)
    }
    // Build the watch future: resolve authentication, construct a connector
    // factory (TLS or plain TCP), and hand both to `WatchSession`.
    fn run(
        &self,
    ) -> Box<dyn futures::Future<Item = (), Error = Error> + Send> {
        let auth = match self.client.auth {
            crate::protocol::AuthType::Plain => {
                // Plain auth needs a configured username.
                let username = self
                    .client
                    .username
                    .clone()
                    .context(crate::error::CouldntFindUsername);
                match username {
                    Ok(username) => crate::protocol::Auth::plain(&username),
                    // `?` can't be used here: the return type is a boxed
                    // future, so errors are wrapped in `future::err`.
                    Err(e) => return Box::new(futures::future::err(e)),
                }
            }
            crate::protocol::AuthType::RecurseCenter => {
                let id = crate::client::load_client_auth_id(self.client.auth);
                crate::protocol::Auth::recurse_center(
                    id.as_ref().map(std::string::String::as_str),
                )
            }
        };
        let host = self.client.host().to_string();
        let address = *self.client.addr();
        if self.client.tls {
            let connector = match native_tls::TlsConnector::new()
                .context(crate::error::CreateConnector)
            {
                Ok(connector) => connector,
                Err(e) => return Box::new(futures::future::err(e)),
            };
            // Factory-of-factories: the outer closure is cloned per
            // reconnect, the inner one performs a single TCP + TLS connect.
            let make_connector: Box<
                dyn Fn() -> crate::client::Connector<_> + Send,
            > = Box::new(move || {
                let host = host.clone();
                let connector = connector.clone();
                Box::new(move || {
                    let host = host.clone();
                    let connector = connector.clone();
                    let connector = tokio_tls::TlsConnector::from(connector);
                    let stream =
                        tokio::net::tcp::TcpStream::connect(&address);
                    Box::new(
                        stream
                            .context(crate::error::Connect { address })
                            .and_then(move |stream| {
                                connector.connect(&host, stream).context(
                                    crate::error::ConnectTls { host },
                                )
                            }),
                    )
                })
            });
            Box::new(WatchSession::new(make_connector, &auth))
        } else {
            // Plain TCP: no TLS handshake after connecting.
            let make_connector: Box<
                dyn Fn() -> crate::client::Connector<_> + Send,
            > = Box::new(move || {
                Box::new(move || {
                    Box::new(
                        tokio::net::tcp::TcpStream::connect(&address)
                            .context(crate::error::Connect { address }),
                    )
                })
            });
            Box::new(WatchSession::new(make_connector, &auth))
        }
    }
}
/// Attaches the watch subcommand's description and the shared client
/// arguments to `app`.
pub fn cmd<'a, 'b>(app: clap::App<'a, 'b>) -> clap::App<'a, 'b> {
    let described = app.about("Watch teleterm streams");
    crate::config::Client::cmd(described)
}
pub fn config(
mut config: Option<config::Config>,
) -> Result<Box<dyn crate::config::Config>> {
if config.is_none() {
config = crate::config::wizard::run()?;
}
let config: Config = if let Some(config) = config {
config
.try_into()
.context(crate::error::CouldntParseConfig)?
} else {
Config::default()
};
Ok(Box::new(config))
}
// XXX https://github.com/rust-lang/rust/issues/64362
#[allow(dead_code)]
// UI state machine for the watch session.
enum State<S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Send + 'static> {
    /// Placeholder used only while transitioning between states.
    Temporary,
    /// Waiting for the session list after (re)connecting.
    LoggingIn {
        alternate_screen: crossterm::screen::AlternateScreen,
    },
    /// Showing the session list for the user to pick from.
    Choosing {
        sessions: crate::session_list::SessionList,
        alternate_screen: crossterm::screen::AlternateScreen,
    },
    /// Streaming a selected session's terminal output.
    Watching {
        client: Box<crate::client::Client<S>>,
    },
}
impl<S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Send + 'static>
    State<S>
{
    fn new() -> Self {
        Self::Temporary
    }
    // Transition to LoggingIn, reusing the existing alternate screen when
    // one is already open. `Temporary` is swapped in via `mem::replace` so
    // the old state's screen can be moved out by value.
    fn logging_in(&mut self) -> Result<()> {
        let prev_state = std::mem::replace(self, Self::Temporary);
        *self = match prev_state {
            Self::Temporary => unreachable!(),
            Self::LoggingIn { alternate_screen } => {
                Self::LoggingIn { alternate_screen }
            }
            Self::Choosing {
                alternate_screen, ..
            } => Self::LoggingIn { alternate_screen },
            _ => Self::LoggingIn {
                alternate_screen: new_alternate_screen()?,
            },
        };
        Ok(())
    }
    // Transition to Choosing with a fresh session list, again reusing the
    // alternate screen when possible.
    fn choosing(
        &mut self,
        sessions: crate::session_list::SessionList,
    ) -> Result<()> {
        let prev_state = std::mem::replace(self, Self::Temporary);
        *self = match prev_state {
            Self::Temporary => unreachable!(),
            Self::LoggingIn { alternate_screen } => Self::Choosing {
                alternate_screen,
                sessions,
            },
            Self::Choosing {
                alternate_screen, ..
            } => Self::Choosing {
                alternate_screen,
                sessions,
            },
            _ => Self::Choosing {
                alternate_screen: new_alternate_screen()?,
            },
        };
        Ok(())
    }
    // Transition to Watching; the alternate screen of the previous state is
    // dropped here (watching writes raw output directly).
    fn watching(&mut self, client: crate::client::Client<S>) {
        if let Self::Temporary = self {
            unreachable!()
        }
        *self = Self::Watching {
            client: Box::new(client),
        }
    }
}
// Top-level future driving the whole watch UI.
struct WatchSession<
    S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Send + 'static,
> {
    // $TERM to report to the server.
    term_type: String,
    // Factory producing fresh connectors for (re)connecting clients.
    make_connector: Box<dyn Fn() -> crate::client::Connector<S> + Send>,
    auth: crate::protocol::Auth,
    key_reader: crate::key_reader::KeyReader,
    // Long-lived client used to fetch the session list.
    list_client: crate::client::Client<S>,
    // Stream of terminal (rows, cols) resize events.
    resizer: Box<
        dyn futures::Stream<Item = (u16, u16), Error = crate::error::Error>
            + Send,
    >,
    state: State<S>,
    // Held while the terminal is in raw mode; dropping restores it.
    raw_screen: Option<crossterm::screen::RawScreen>,
    needs_redraw: bool,
}
impl<S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Send + 'static>
    WatchSession<S>
{
    // Sets up the list client and resize stream; the terminal itself is not
    // touched until the future is first polled.
    fn new(
        make_connector: Box<dyn Fn() -> crate::client::Connector<S> + Send>,
        auth: &crate::protocol::Auth,
    ) -> Self {
        let term_type =
            std::env::var("TERM").unwrap_or_else(|_| "".to_string());
        let list_client = crate::client::Client::list(
            &term_type,
            make_connector(),
            auth,
            crate::protocol::AuthClient::Cli,
        );
        Self {
            term_type,
            make_connector,
            auth: auth.clone(),
            key_reader: crate::key_reader::KeyReader::new(),
            list_client,
            resizer: Box::new(
                tokio_terminal_resize::resizes()
                    .flatten_stream()
                    .context(crate::error::Resize),
            ),
            state: State::new(),
            raw_screen: None,
            needs_redraw: true,
        }
    }
    // Returns to the loading screen; a "hard" reconnect tears down the list
    // connection, a soft one just re-requests the session list.
    fn reconnect(&mut self, hard: bool) -> Result<()> {
        self.state.logging_in()?;
        self.needs_redraw = true;
        if hard {
            self.list_client.reconnect();
        } else {
            self.list_client
                .send_message(crate::protocol::Message::list_sessions());
        }
        Ok(())
    }
fn loading_keypress(
&mut self,
e: &crossterm::input::InputEvent,
) -> Result<bool> {
match e {
crossterm::input::InputEvent::Keyboard(
crossterm::input::KeyEvent::Char('q'),
) => {
return Ok(true);
}
_ => {}
}
Ok(false)
}
    // Handles a server message received while on the loading/choosing
    // screens.
    fn list_server_message(
        &mut self,
        msg: crate::protocol::Message,
    ) -> Result<()> {
        match msg {
            // A fresh session list moves us to the choosing screen.
            crate::protocol::Message::Sessions { sessions } => {
                self.state.choosing(
                    crate::session_list::SessionList::new(
                        sessions,
                        crate::term::Size::get()?,
                    ),
                )?;
                self.needs_redraw = true;
            }
            crate::protocol::Message::Disconnected => {
                self.reconnect(true)?;
            }
            crate::protocol::Message::Error { msg } => {
                return Err(Error::Server { message: msg });
            }
            msg => {
                return Err(crate::error::Error::UnexpectedMessage {
                    message: msg,
                });
            }
        }
        Ok(())
    }
    // Handles a keypress on the choosing screen: space refreshes, q quits,
    // </> page, and any session's hotkey starts watching it.
    fn list_keypress(
        &mut self,
        e: &crossterm::input::InputEvent,
    ) -> Result<bool> {
        let sessions =
            if let State::Choosing { sessions, .. } = &mut self.state {
                sessions
            } else {
                unreachable!()
            };
        match e {
            crossterm::input::InputEvent::Keyboard(
                crossterm::input::KeyEvent::Char(' '),
            ) => {
                self.list_client
                    .send_message(crate::protocol::Message::list_sessions());
            }
            crossterm::input::InputEvent::Keyboard(
                crossterm::input::KeyEvent::Char('q'),
            ) => {
                return Ok(true);
            }
            crossterm::input::InputEvent::Keyboard(
                crossterm::input::KeyEvent::Char('<'),
            ) => {
                sessions.prev_page();
                self.needs_redraw = true;
            }
            crossterm::input::InputEvent::Keyboard(
                crossterm::input::KeyEvent::Char('>'),
            ) => {
                sessions.next_page();
                self.needs_redraw = true;
            }
            // Any other character: if it maps to a listed session, connect a
            // watch client for that session id.
            crossterm::input::InputEvent::Keyboard(
                crossterm::input::KeyEvent::Char(c),
            ) => {
                if let Some(id) = sessions.id_for(*c) {
                    let client = crate::client::Client::watch(
                        &self.term_type,
                        (self.make_connector)(),
                        &self.auth,
                        crate::protocol::AuthClient::Cli,
                        id,
                    );
                    self.state.watching(client);
                    clear()?;
                }
            }
            _ => {}
        }
        Ok(false)
    }
fn watch_server_message(
&mut self,
msg: crate::protocol::Message,
) -> Result<()> {
match msg {
crate::protocol::Message::TerminalOutput { data } => {
// TODO async
let stdout = std::io::stdout();
let mut stdout = stdout.lock();
stdout.write(&data).context(crate::error::WriteTerminal)?;
stdout.flush().context(crate::error::FlushTerminal)?;
}
crate::protocol::Message::Disconnected => {
self.reconnect(false)?;
}
crate::protocol::Message::Error { msg } => {
return Err(Error::Server { message: msg });
}
crate::protocol::Message::Resize { .. } => {
// do nothing
}
msg => {
return Err(crate::error::Error::UnexpectedMessage {
message: msg,
});
}
}
Ok(())
}
fn watch_keypress(
&mut self,
e: &crossterm::input::InputEvent,
) -> Result<bool> {
match e {
crossterm::input::InputEvent::Keyboard(
crossterm::input::KeyEvent::Char('q'),
) => {
self.reconnect(false)?;
}
_ => {}
}
Ok(false)
}
fn resize(&mut self, size: crate::term::Size) -> Result<()> {
if let State::Choosing { sessions, .. } = &mut self.state {
sessions.resize(size);
self.needs_redraw = true;
}
Ok(())
}
    // Repaints the screen appropriate for the current state; the watching
    // state paints nothing itself (server output is streamed directly).
    fn redraw(&self) -> Result<()> {
        match &self.state {
            State::Temporary => unreachable!(),
            State::LoggingIn { .. } => {
                self.display_loading_screen()?;
            }
            State::Choosing { .. } => {
                self.display_choosing_screen()?;
            }
            State::Watching { .. } => {}
        }
        Ok(())
    }
    // Draws the "loading..." screen, including the last connection error if
    // there is one.
    fn display_loading_screen(&self) -> Result<()> {
        clear()?;
        print!("loading...\r\n");
        if let Some(err) = self.list_client.last_error() {
            print!("error: {}\r\n", err);
        }
        print!("q: quit --> ");
        std::io::stdout()
            .flush()
            .context(crate::error::FlushTerminal)?;
        Ok(())
    }
    // Draws the session table: computes column widths from the visible
    // sessions, then prints header, separator, one row per session, and the
    // paging prompt.
    fn display_choosing_screen(&self) -> Result<()> {
        let sessions = if let State::Choosing { sessions, .. } = &self.state {
            sessions
        } else {
            unreachable!()
        };
        let char_width = 2;
        // Name column: widest username, clamped to [4, cols/3].
        let max_name_width = (sessions.size().cols / 3) as usize;
        let name_width = sessions
            .visible_sessions()
            .iter()
            .map(|s| s.username.len())
            .max()
            .unwrap_or(4);
        // XXX unstable
        // let name_width = name_width.clamp(4, max_name_width);
        let name_width = if name_width < 4 {
            4
        } else if name_width > max_name_width {
            max_name_width
        } else {
            name_width
        };
        let size_width = 7;
        // Idle column: wide enough for the longest formatted idle time.
        let max_idle_time = sessions
            .visible_sessions()
            .iter()
            .map(|s| s.idle_time)
            .max()
            .unwrap_or(4);
        let idle_width = format_time(max_idle_time).len();
        let idle_width = if idle_width < 4 { 4 } else { idle_width };
        let watch_width = 5;
        // Title gets whatever is left after the fixed columns + separators.
        let max_title_width = (sessions.size().cols as usize)
            - char_width
            - 3
            - name_width
            - 3
            - size_width
            - 3
            - idle_width
            - 3
            - watch_width
            - 3;
        clear()?;
        print!("welcome to teleterm\r\n");
        print!("available sessions:\r\n");
        print!("\r\n");
        print!(
            "{:5$} | {:6$} | {:7$} | {:8$} | {:9$} | title\r\n",
            "",
            "name",
            "size",
            "idle",
            "watch",
            char_width,
            name_width,
            size_width,
            idle_width,
            watch_width,
        );
        print!(
            "{}+{}+{}+{}+{}+{}\r\n",
            "-".repeat(char_width + 1),
            "-".repeat(name_width + 2),
            "-".repeat(size_width + 2),
            "-".repeat(idle_width + 2),
            "-".repeat(watch_width + 2),
            "-".repeat(max_title_width + 1)
        );
        let mut prev_name: Option<&str> = None;
        for (c, session) in sessions.visible_sessions_with_chars() {
            // Only print the username on its first consecutive row.
            let first = if let Some(name) = prev_name {
                name != session.username
            } else {
                true
            };
            let display_char = format!("{})", c);
            let display_name = if first {
                truncate(&session.username, max_name_width)
            } else {
                "".to_string()
            };
            let display_size_plain = format!("{}", &session.size);
            // Color the size green if it matches ours, red if it wouldn't
            // fit in our terminal.
            let display_size_full = if session.size == sessions.size() {
                // XXX i should be able to use crossterm::style here, but
                // it has bugs
                format!("\x1b[32m{}\x1b[m", display_size_plain)
            } else if session.size.fits_in(sessions.size()) {
                display_size_plain.clone()
            } else {
                // XXX i should be able to use crossterm::style here, but
                // it has bugs
                format!("\x1b[31m{}\x1b[m", display_size_plain)
            };
            let display_idle = format_time(session.idle_time);
            let display_title = truncate(&session.title, max_title_width);
            let display_watch = session.watchers;
            print!(
                "{:6$} | {:7$} | {:8$} | {:9$} | {:10$} | {}\r\n",
                display_char,
                display_name,
                display_size_full,
                display_idle,
                display_watch,
                display_title,
                char_width,
                name_width,
                // widen by the invisible escape-sequence bytes so the
                // visible column stays aligned
                size_width
                    + (display_size_full.len() - display_size_plain.len()),
                idle_width,
                watch_width,
            );
            prev_name = Some(&session.username);
        }
        print!(
            "({}/{}) space: refresh, q: quit, <: prev page, >: next page --> ",
            sessions.current_page(),
            sessions.total_pages(),
        );
        std::io::stdout()
            .flush()
            .context(crate::error::FlushTerminal)?;
        Ok(())
    }
}
impl<S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Send + 'static>
    WatchSession<S>
{
    // The sub-pollers driven round-robin by `component_future::poll_future`.
    const POLL_FNS:
        &'static [&'static dyn for<'a> Fn(
            &'a mut Self,
        )
            -> component_future::Poll<
            (),
            Error,
        >] = &[
        &Self::poll_resizer,
        &Self::poll_input,
        &Self::poll_list_client,
        &Self::poll_watch_client,
    ];
    // Forwards terminal resize events into the UI.
    fn poll_resizer(&mut self) -> component_future::Poll<(), Error> {
        let (rows, cols) =
            component_future::try_ready!(self.resizer.poll()).unwrap();
        self.resize(crate::term::Size { rows, cols })?;
        Ok(component_future::Async::DidWork)
    }
    // Reads keypresses and dispatches them to the current state's handler.
    fn poll_input(&mut self) -> component_future::Poll<(), Error> {
        // Lazily enter raw mode / the alternate screen on first poll.
        if self.raw_screen.is_none() {
            self.raw_screen = Some(new_raw_screen()?);
        }
        if let State::Temporary = self.state {
            self.state = State::LoggingIn {
                alternate_screen: new_alternate_screen()?,
            }
        }
        let e = component_future::try_ready!(self.key_reader.poll()).unwrap();
        let quit = match &mut self.state {
            State::Temporary => unreachable!(),
            State::LoggingIn { .. } => self.loading_keypress(&e)?,
            State::Choosing { .. } => self.list_keypress(&e)?,
            State::Watching { .. } => self.watch_keypress(&e)?,
        };
        if quit {
            Ok(component_future::Async::Ready(()))
        } else {
            Ok(component_future::Async::DidWork)
        }
    }
    // Drives the session-list client connection.
    fn poll_list_client(&mut self) -> component_future::Poll<(), Error> {
        match component_future::try_ready!(self.list_client.poll()).unwrap() {
            crate::client::Event::Disconnect => {
                self.reconnect(true)?;
            }
            crate::client::Event::Connect => {
                self.list_client
                    .send_message(crate::protocol::Message::list_sessions());
            }
            crate::client::Event::ServerMessage(msg) => {
                self.list_server_message(msg)?;
            }
        }
        Ok(component_future::Async::DidWork)
    }
    // Drives the watch client, when we are in the watching state.
    fn poll_watch_client(&mut self) -> component_future::Poll<(), Error> {
        let client = if let State::Watching { client } = &mut self.state {
            client
        } else {
            return Ok(component_future::Async::NothingToDo);
        };
        match component_future::try_ready!(client.poll()).unwrap() {
            crate::client::Event::Disconnect => {
                self.reconnect(true)?;
            }
            crate::client::Event::Connect => {}
            crate::client::Event::ServerMessage(msg) => {
                self.watch_server_message(msg)?;
            }
        }
        Ok(component_future::Async::DidWork)
    }
}
#[must_use = "futures do nothing unless polled"]
impl<S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Send + 'static>
    futures::Future for WatchSession<S>
{
    type Item = ();
    type Error = Error;
    fn poll(&mut self) -> futures::Poll<Self::Item, Self::Error> {
        let res = component_future::poll_future(self, Self::POLL_FNS);
        if res.is_err() {
            // On error, drop the alternate/raw screens before returning so
            // the error is reported on a restored terminal.
            self.state = State::Temporary; // drop alternate screen
            self.raw_screen = None;
        } else if self.needs_redraw {
            self.redraw()?;
            self.needs_redraw = false;
        }
        res
    }
}
// Puts the terminal into raw mode; dropping the returned guard restores it.
fn new_raw_screen() -> Result<crossterm::screen::RawScreen> {
    crossterm::screen::RawScreen::into_raw_mode()
        .context(crate::error::ToRawMode)
}
// Switches to the alternate screen buffer (without raw mode).
fn new_alternate_screen() -> Result<crossterm::screen::AlternateScreen> {
    crossterm::screen::AlternateScreen::to_alternate(false)
        .context(crate::error::ToAlternateScreen)
}
/// Formats a duration in seconds as the shortest of `Ns`, `NmSSs`,
/// `NhMMmSSs` or `NdHHhMMmSSs`, zero-padding every unit except the largest.
fn format_time(dur: u32) -> String {
    let (secs, rest) = (dur % 60, dur / 60);
    if rest == 0 {
        return format!("{}s", secs);
    }
    let (mins, rest) = (rest % 60, rest / 60);
    if rest == 0 {
        return format!("{}m{:02}s", mins, secs);
    }
    let (hours, days) = (rest % 24, rest / 24);
    if days == 0 {
        return format!("{}h{:02}m{:02}s", hours, mins, secs);
    }
    format!("{}d{:02}h{:02}m{:02}s", days, hours, mins, secs)
}
/// Truncates `s` to at most `len` bytes, replacing the removed tail with
/// `"..."`.
///
/// Fixes two panics in the previous version: `len < 3` no longer underflows
/// (the kept prefix saturates to empty), and the cut point is backed up to a
/// UTF-8 character boundary so multibyte strings never panic on slicing.
/// Behavior for ASCII input with `len >= 3` is unchanged.
fn truncate(s: &str, len: usize) -> String {
    if s.len() <= len {
        return s.to_string();
    }
    // Reserve 3 bytes for the ellipsis; saturate so len < 3 yields "...".
    let mut end = len.saturating_sub(3);
    // Back up to a char boundary so slicing can't panic on multibyte text.
    while end > 0 && !s.is_char_boundary(end) {
        end -= 1;
    }
    format!("{}...", &s[..end])
}
// Moves the cursor home and clears the whole screen.
fn clear() -> Result<()> {
    crossterm::execute!(
        std::io::stdout(),
        crossterm::cursor::MoveTo(0, 0),
        crossterm::terminal::Clear(crossterm::terminal::ClearType::All)
    )
    .context(crate::error::WriteTerminalCrossterm)
}
#[cfg(test)]
mod test {
    use super::*;
    // Exercises truncate around the length boundary, including inputs
    // shorter than, equal to, and longer than the limit.
    #[test]
    fn test_truncate() {
        assert_eq!(truncate("abcdefghij", 12), "abcdefghij");
        assert_eq!(truncate("abcdefghij", 11), "abcdefghij");
        assert_eq!(truncate("abcdefghij", 10), "abcdefghij");
        assert_eq!(truncate("abcdefghij", 9), "abcdef...");
        assert_eq!(truncate("abcdefghij", 8), "abcde...");
        assert_eq!(truncate("abcdefghij", 7), "abcd...");
        assert_eq!(truncate("", 7), "");
        assert_eq!(truncate("a", 7), "a");
        assert_eq!(truncate("ab", 7), "ab");
        assert_eq!(truncate("abc", 7), "abc");
        assert_eq!(truncate("abcd", 7), "abcd");
        assert_eq!(truncate("abcde", 7), "abcde");
        assert_eq!(truncate("abcdef", 7), "abcdef");
        assert_eq!(truncate("abcdefg", 7), "abcdefg");
        assert_eq!(truncate("abcdefgh", 7), "abcd...");
        assert_eq!(truncate("abcdefghi", 7), "abcd...");
        assert_eq!(truncate("abcdefghij", 7), "abcd...");
    }
    // Exercises format_time at every unit rollover (s -> m -> h -> d).
    #[test]
    fn test_format_time() {
        assert_eq!(format_time(0), "0s");
        assert_eq!(format_time(5), "5s");
        assert_eq!(format_time(10), "10s");
        assert_eq!(format_time(60), "1m00s");
        assert_eq!(format_time(61), "1m01s");
        assert_eq!(format_time(601), "10m01s");
        assert_eq!(format_time(610), "10m10s");
        assert_eq!(format_time(3599), "59m59s");
        assert_eq!(format_time(3600), "1h00m00s");
        assert_eq!(format_time(3601), "1h00m01s");
        assert_eq!(format_time(3610), "1h00m10s");
        assert_eq!(format_time(3660), "1h01m00s");
        assert_eq!(format_time(3661), "1h01m01s");
        assert_eq!(format_time(3670), "1h01m10s");
        assert_eq!(format_time(4200), "1h10m00s");
        assert_eq!(format_time(4201), "1h10m01s");
        assert_eq!(format_time(4210), "1h10m10s");
        assert_eq!(format_time(36000), "10h00m00s");
        assert_eq!(format_time(86399), "23h59m59s");
        assert_eq!(format_time(86400), "1d00h00m00s");
        assert_eq!(format_time(86401), "1d00h00m01s");
        assert_eq!(format_time(864_000), "10d00h00m00s");
        assert_eq!(format_time(8_640_000), "100d00h00m00s");
        assert_eq!(format_time(86_400_000), "1000d00h00m00s");
    }
}
| 31.546154 | 79 | 0.483297 |
293ae38e1b14601f7f6a5deb3e23d32b232c694c | 6,550 | //!
//! slot history
//!
pub use crate::clock::Slot;
use bv::BitVec;
use bv::BitsMut;
#[repr(C)]
#[derive(Serialize, Deserialize, PartialEq)]
pub struct SlotHistory {
    /// Ring buffer of presence bits, indexed by `slot % MAX_ENTRIES`.
    pub bits: BitVec<u64>,
    /// One past the most recently added slot.
    pub next_slot: Slot,
}
impl Default for SlotHistory {
    /// Starts the history with only slot 0 marked present and `next_slot`
    /// pointing just past it.
    fn default() -> Self {
        let mut history = Self {
            bits: BitVec::new_fill(false, MAX_ENTRIES),
            next_slot: 1,
        };
        history.bits.set(0, true);
        history
    }
}
impl std::fmt::Debug for SlotHistory {
    // Prints the next slot followed by the entire bit window as 0/1 digits.
    // NOTE(review): the opening `{` written by the `{{` escape is never
    // matched by a closing brace in the output — possibly unintended.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "SlotHistory {{ slot: {} bits:", self.next_slot)?;
        for i in 0..MAX_ENTRIES {
            if self.bits.get(i) {
                write!(f, "1")?;
            } else {
                write!(f, "0")?;
            }
        }
        Ok(())
    }
}
pub const MAX_ENTRIES: u64 = 1024 * 1024; // 1 million slots is about 5 days
/// Result of querying a slot against the history window.
#[derive(PartialEq, Debug)]
pub enum Check {
    /// Newer than any slot added so far.
    Future,
    /// Older than the window the ring buffer still covers.
    TooOld,
    /// Inside the window and present.
    Found,
    /// Inside the window but absent (a skipped slot).
    NotFound,
}
impl SlotHistory {
    /// Marks `slot` present and advances `next_slot` past it, clearing any
    /// slots that were skipped over.
    pub fn add(&mut self, slot: Slot) {
        if slot > self.next_slot && slot - self.next_slot >= MAX_ENTRIES {
            // Wrapped past current history,
            // clear entire bitvec.
            let full_blocks = (MAX_ENTRIES as usize) / 64;
            for i in 0..full_blocks {
                // clear 64 bits at a time
                self.bits.set_block(i, 0);
            }
        } else {
            // Clear only the bits for slots skipped between next_slot and
            // slot (their ring positions may hold stale data).
            for skipped in self.next_slot..slot {
                self.bits.set(skipped % MAX_ENTRIES, false);
            }
        }
        self.bits.set(slot % MAX_ENTRIES, true);
        self.next_slot = slot + 1;
    }
    /// Classifies `slot` relative to the retained window; see `Check`.
    pub fn check(&self, slot: Slot) -> Check {
        if slot > self.newest() {
            Check::Future
        } else if slot < self.oldest() {
            Check::TooOld
        } else if self.bits.get(slot % MAX_ENTRIES) {
            Check::Found
        } else {
            Check::NotFound
        }
    }
    /// Oldest slot still covered by the window (0 until it fills up).
    pub fn oldest(&self) -> Slot {
        self.next_slot.saturating_sub(MAX_ENTRIES)
    }
    /// Most recently added slot. `next_slot` starts at 1 (see `Default`), so
    /// the subtraction does not underflow for a normally constructed value.
    pub fn newest(&self) -> Slot {
        self.next_slot - 1
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use log::*;
    // Basic add/check behavior, including a jump that ages out old slots.
    #[test]
    fn slot_history_test1() {
        solana_logger::setup();
        // should be divisible by 64 since the clear logic works on blocks
        assert_eq!(MAX_ENTRIES % 64, 0);
        let mut slot_history = SlotHistory::default();
        info!("add 2");
        slot_history.add(2);
        assert_eq!(slot_history.check(0), Check::Found);
        assert_eq!(slot_history.check(1), Check::NotFound);
        for i in 3..MAX_ENTRIES {
            assert_eq!(slot_history.check(i), Check::Future);
        }
        info!("add 20");
        slot_history.add(20);
        info!("add max_entries");
        slot_history.add(MAX_ENTRIES);
        assert_eq!(slot_history.check(0), Check::TooOld);
        assert_eq!(slot_history.check(1), Check::NotFound);
        for i in &[2, 20, MAX_ENTRIES] {
            assert_eq!(slot_history.check(*i), Check::Found);
        }
        for i in 3..20 {
            assert_eq!(slot_history.check(i), Check::NotFound, "i: {}", i);
        }
        for i in 21..MAX_ENTRIES {
            assert_eq!(slot_history.check(i), Check::NotFound, "i: {}", i);
        }
        assert_eq!(slot_history.check(MAX_ENTRIES + 1), Check::Future);
        info!("add max_entries + 3");
        let slot = 3 * MAX_ENTRIES + 3;
        slot_history.add(slot);
        for i in &[0, 1, 2, 20, 21, MAX_ENTRIES] {
            assert_eq!(slot_history.check(*i), Check::TooOld);
        }
        let start = slot - MAX_ENTRIES + 1;
        let end = slot;
        for i in start..end {
            assert_eq!(slot_history.check(i), Check::NotFound, "i: {}", i);
        }
        assert_eq!(slot_history.check(slot), Check::Found);
    }
    // Adds a slot that wraps the ring buffer and verifies the old window
    // contents are treated correctly.
    #[test]
    fn slot_history_test_wrap() {
        solana_logger::setup();
        let mut slot_history = SlotHistory::default();
        info!("add 2");
        slot_history.add(2);
        assert_eq!(slot_history.check(0), Check::Found);
        assert_eq!(slot_history.check(1), Check::NotFound);
        for i in 3..MAX_ENTRIES {
            assert_eq!(slot_history.check(i), Check::Future);
        }
        info!("add 20");
        slot_history.add(20);
        info!("add max_entries + 19");
        slot_history.add(MAX_ENTRIES + 19);
        for i in 0..19 {
            assert_eq!(slot_history.check(i), Check::TooOld);
        }
        assert_eq!(slot_history.check(MAX_ENTRIES), Check::NotFound);
        assert_eq!(slot_history.check(20), Check::Found);
        assert_eq!(slot_history.check(MAX_ENTRIES + 19), Check::Found);
        assert_eq!(slot_history.check(20), Check::Found);
        for i in 21..MAX_ENTRIES + 19 {
            assert_eq!(slot_history.check(i), Check::NotFound, "found: {}", i);
        }
        assert_eq!(slot_history.check(MAX_ENTRIES + 20), Check::Future);
    }
    // Two slots mapping to the same ring index must not shadow each other
    // incorrectly once the window advances.
    #[test]
    fn slot_history_test_same_index() {
        solana_logger::setup();
        let mut slot_history = SlotHistory::default();
        info!("add 3,4");
        slot_history.add(3);
        slot_history.add(4);
        assert_eq!(slot_history.check(1), Check::NotFound);
        assert_eq!(slot_history.check(2), Check::NotFound);
        assert_eq!(slot_history.check(3), Check::Found);
        assert_eq!(slot_history.check(4), Check::Found);
        slot_history.add(MAX_ENTRIES + 5);
        assert_eq!(slot_history.check(5), Check::TooOld);
        for i in 6..MAX_ENTRIES + 5 {
            assert_eq!(slot_history.check(i), Check::NotFound, "i: {}", i);
        }
        assert_eq!(slot_history.check(MAX_ENTRIES + 5), Check::Found);
    }
    // Adding a slot older than next_slot effectively rewinds the window.
    #[test]
    fn test_older_slot() {
        let mut slot_history = SlotHistory::default();
        slot_history.add(10);
        slot_history.add(5);
        assert_eq!(slot_history.check(0), Check::Found);
        assert_eq!(slot_history.check(5), Check::Found);
        // If we go backwards we reset?
        assert_eq!(slot_history.check(10), Check::Future);
        assert_eq!(slot_history.check(6), Check::Future);
        assert_eq!(slot_history.check(11), Check::Future);
    }
#[test]
fn test_oldest() {
let mut slot_history = SlotHistory::default();
assert_eq!(slot_history.oldest(), 0);
slot_history.add(10);
assert_eq!(slot_history.oldest(), 0);
slot_history.add(MAX_ENTRIES - 1);
assert_eq!(slot_history.oldest(), 0);
slot_history.add(MAX_ENTRIES);
assert_eq!(slot_history.oldest(), 1);
}
}
| 31.642512 | 79 | 0.560916 |
5b8b379183d1df81ed2dc2cf5cd246254e8bd434 | 14,110 | mod light;
mod mesh;
pub use light::*;
pub use mesh::*;
use wgpu::SamplerBindingType;
use crate::{AlphaMode, StandardMaterial, StandardMaterialUniformData, PBR_SHADER_HANDLE};
use bevy_asset::Handle;
use bevy_core_pipeline::{AlphaMask3d, Opaque3d, Transparent3d};
use bevy_ecs::{
prelude::*,
system::{lifetimeless::*, SystemParamItem},
};
use bevy_render::{
mesh::Mesh,
render_asset::RenderAssets,
render_phase::{
DrawFunctions, EntityRenderCommand, RenderCommandResult, RenderPhase, SetItemPipeline,
TrackedRenderPass,
},
render_resource::{std140::AsStd140, *},
renderer::RenderDevice,
view::{ExtractedView, Msaa, VisibleEntities},
};
// NOTE: These must match the bit flags in bevy_pbr2/src/render/pbr.wgsl!
bitflags::bitflags! {
    /// Per-material feature flags uploaded to the fragment shader.
    /// The bit values must match the constants in bevy_pbr2/src/render/pbr.wgsl
    /// (see the NOTE above).
    #[repr(transparent)]
    pub struct StandardMaterialFlags: u32 {
        const BASE_COLOR_TEXTURE         = (1 << 0);
        const EMISSIVE_TEXTURE           = (1 << 1);
        const METALLIC_ROUGHNESS_TEXTURE = (1 << 2);
        const OCCLUSION_TEXTURE          = (1 << 3);
        const DOUBLE_SIDED               = (1 << 4);
        const UNLIT                      = (1 << 5);
        const ALPHA_MODE_OPAQUE          = (1 << 6);
        const ALPHA_MODE_MASK            = (1 << 7);
        const ALPHA_MODE_BLEND           = (1 << 8);
        const NONE                       = 0;
        const UNINITIALIZED              = 0xFFFF;
    }
}
/// Render pipeline for PBR materials: wraps the shared [`MeshPipeline`]
/// and adds the material bind-group layout (uniform + texture bindings).
#[derive(Clone)]
pub struct PbrPipeline {
    pub mesh_pipeline: MeshPipeline,
    pub material_layout: BindGroupLayout,
}
impl FromWorld for PbrPipeline {
fn from_world(world: &mut World) -> Self {
let render_device = world.get_resource::<RenderDevice>().unwrap();
let material_layout = render_device.create_bind_group_layout(&BindGroupLayoutDescriptor {
entries: &[
BindGroupLayoutEntry {
binding: 0,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Buffer {
ty: BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: BufferSize::new(
StandardMaterialUniformData::std140_size_static() as u64,
),
},
count: None,
},
// Base Color Texture
BindGroupLayoutEntry {
binding: 1,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Texture {
multisampled: false,
sample_type: TextureSampleType::Float { filterable: true },
view_dimension: TextureViewDimension::D2,
},
count: None,
},
// Base Color Texture Sampler
BindGroupLayoutEntry {
binding: 2,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Sampler(SamplerBindingType::Filtering),
count: None,
},
// Emissive Texture
BindGroupLayoutEntry {
binding: 3,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Texture {
multisampled: false,
sample_type: TextureSampleType::Float { filterable: true },
view_dimension: TextureViewDimension::D2,
},
count: None,
},
// Emissive Texture Sampler
BindGroupLayoutEntry {
binding: 4,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Sampler(SamplerBindingType::Filtering),
count: None,
},
// Metallic Roughness Texture
BindGroupLayoutEntry {
binding: 5,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Texture {
multisampled: false,
sample_type: TextureSampleType::Float { filterable: true },
view_dimension: TextureViewDimension::D2,
},
count: None,
},
// Metallic Roughness Texture Sampler
BindGroupLayoutEntry {
binding: 6,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Sampler(SamplerBindingType::Filtering),
count: None,
},
// Occlusion Texture
BindGroupLayoutEntry {
binding: 7,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Texture {
multisampled: false,
sample_type: TextureSampleType::Float { filterable: true },
view_dimension: TextureViewDimension::D2,
},
count: None,
},
// Occlusion Texture Sampler
BindGroupLayoutEntry {
binding: 8,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Sampler(SamplerBindingType::Filtering),
count: None,
},
// Normal Map Texture
BindGroupLayoutEntry {
binding: 9,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Texture {
multisampled: false,
sample_type: TextureSampleType::Float { filterable: true },
view_dimension: TextureViewDimension::D2,
},
count: None,
},
// Normal Map Texture Sampler
BindGroupLayoutEntry {
binding: 10,
visibility: ShaderStages::FRAGMENT,
ty: BindingType::Sampler(SamplerBindingType::Filtering),
count: None,
},
],
label: Some("pbr_material_layout"),
});
PbrPipeline {
material_layout,
mesh_pipeline: world.get_resource::<MeshPipeline>().unwrap().clone(),
}
}
}
/// Specialization key for [`PbrPipeline`]: the underlying mesh pipeline
/// key plus whether the material uses a normal map (which toggles the
/// STANDARDMATERIAL_NORMAL_MAP shader def).
#[derive(Clone, Copy, Hash, PartialEq, Eq)]
pub struct PbrPipelineKey {
    pub mesh_key: MeshPipelineKey,
    pub normal_map: bool,
}
impl SpecializedPipeline for PbrPipeline {
    type Key = PbrPipelineKey;
    /// Specializes the shared mesh pipeline descriptor for PBR:
    /// swaps in the PBR fragment shader, adds the material bind-group
    /// layout, and enables the normal-map shader def when requested.
    fn specialize(&self, key: Self::Key) -> RenderPipelineDescriptor {
        let mut descriptor = self.mesh_pipeline.specialize(key.mesh_key);
        {
            let fragment = descriptor.fragment.as_mut().unwrap();
            fragment.shader = PBR_SHADER_HANDLE.typed::<Shader>();
            if key.normal_map {
                fragment
                    .shader_defs
                    .push(String::from("STANDARDMATERIAL_NORMAL_MAP"));
            }
        }
        // Bind groups: 0 = view, 1 = material, 2 = mesh.
        descriptor.layout = Some(vec![
            self.mesh_pipeline.view_layout.clone(),
            self.material_layout.clone(),
            self.mesh_pipeline.mesh_layout.clone(),
        ]);
        if let Some(label) = descriptor.label.as_mut() {
            *label = format!("pbr_{}", *label).into();
        }
        descriptor
    }
}
/// Wrapper holding the per-view bind group used by the PBR pass.
pub struct PbrViewBindGroup {
    pub value: BindGroup,
}
/// Queues every visible standard-material mesh of every view into the
/// appropriate render phase (opaque, alpha-mask, or transparent) based on
/// the material's [`AlphaMode`], specializing the PBR pipeline per mesh.
#[allow(clippy::too_many_arguments)]
pub fn queue_meshes(
    opaque_draw_functions: Res<DrawFunctions<Opaque3d>>,
    alpha_mask_draw_functions: Res<DrawFunctions<AlphaMask3d>>,
    transparent_draw_functions: Res<DrawFunctions<Transparent3d>>,
    pbr_pipeline: Res<PbrPipeline>,
    mut pipelines: ResMut<SpecializedPipelines<PbrPipeline>>,
    mut pipeline_cache: ResMut<RenderPipelineCache>,
    msaa: Res<Msaa>,
    render_meshes: Res<RenderAssets<Mesh>>,
    render_materials: Res<RenderAssets<StandardMaterial>>,
    standard_material_meshes: Query<(&Handle<StandardMaterial>, &Handle<Mesh>, &MeshUniform)>,
    mut views: Query<(
        &ExtractedView,
        &VisibleEntities,
        &mut RenderPhase<Opaque3d>,
        &mut RenderPhase<AlphaMask3d>,
        &mut RenderPhase<Transparent3d>,
    )>,
) {
    for (view, visible_entities, mut opaque_phase, mut alpha_mask_phase, mut transparent_phase) in
        views.iter_mut()
    {
        // All three phases use the same DrawPbr function; only the phase
        // (and therefore sort order) differs.
        let draw_opaque_pbr = opaque_draw_functions.read().get_id::<DrawPbr>().unwrap();
        let draw_alpha_mask_pbr = alpha_mask_draw_functions
            .read()
            .get_id::<DrawPbr>()
            .unwrap();
        let draw_transparent_pbr = transparent_draw_functions
            .read()
            .get_id::<DrawPbr>()
            .unwrap();
        let inverse_view_matrix = view.transform.compute_matrix().inverse();
        let inverse_view_row_2 = inverse_view_matrix.row(2);
        let mesh_key = MeshPipelineKey::from_msaa_samples(msaa.samples);
        for visible_entity in &visible_entities.entities {
            if let Ok((material_handle, mesh_handle, mesh_uniform)) =
                standard_material_meshes.get(*visible_entity)
            {
                // Skip entities whose material asset is not prepared yet.
                if let Some(material) = render_materials.get(material_handle) {
                    let mut pbr_key = PbrPipelineKey {
                        mesh_key,
                        normal_map: material.has_normal_map,
                    };
                    if let Some(mesh) = render_meshes.get(mesh_handle) {
                        if mesh.has_tangents {
                            pbr_key.mesh_key |= MeshPipelineKey::VERTEX_TANGENTS;
                        }
                        pbr_key.mesh_key |=
                            MeshPipelineKey::from_primitive_topology(mesh.primitive_topology);
                    }
                    if let AlphaMode::Blend = material.alpha_mode {
                        pbr_key.mesh_key |= MeshPipelineKey::TRANSPARENT_MAIN_PASS
                    }
                    let pipeline_id =
                        pipelines.specialize(&mut pipeline_cache, &pbr_pipeline, pbr_key);
                    // NOTE: row 2 of the inverse view matrix dotted with column 3 of the model matrix
                    // gives the z component of translation of the mesh in view space
                    let mesh_z = inverse_view_row_2.dot(mesh_uniform.transform.col(3));
                    match material.alpha_mode {
                        AlphaMode::Opaque => {
                            opaque_phase.add(Opaque3d {
                                entity: *visible_entity,
                                draw_function: draw_opaque_pbr,
                                pipeline: pipeline_id,
                                // NOTE: Front-to-back ordering for opaque with ascending sort means near should have the
                                // lowest sort key and getting further away should increase. As we have
                                // -z in front of the camera, values in view space decrease away from the
                                // camera. Flipping the sign of mesh_z results in the correct front-to-back ordering
                                distance: -mesh_z,
                            });
                        }
                        AlphaMode::Mask(_) => {
                            alpha_mask_phase.add(AlphaMask3d {
                                entity: *visible_entity,
                                draw_function: draw_alpha_mask_pbr,
                                pipeline: pipeline_id,
                                // NOTE: Front-to-back ordering for alpha mask with ascending sort means near should have the
                                // lowest sort key and getting further away should increase. As we have
                                // -z in front of the camera, values in view space decrease away from the
                                // camera. Flipping the sign of mesh_z results in the correct front-to-back ordering
                                distance: -mesh_z,
                            });
                        }
                        AlphaMode::Blend => {
                            transparent_phase.add(Transparent3d {
                                entity: *visible_entity,
                                draw_function: draw_transparent_pbr,
                                pipeline: pipeline_id,
                                // NOTE: Back-to-front ordering for transparent with ascending sort means far should have the
                                // lowest sort key and getting closer should increase. As we have
                                // -z in front of the camera, the largest distance is -far with values increasing toward the
                                // camera. As such we can just use mesh_z as the distance
                                distance: mesh_z,
                            });
                        }
                    }
                }
            }
        }
    }
}
/// Draw-function stack for PBR meshes: set the specialized pipeline, then
/// the view (group 0), material (group 1) and mesh (group 2) bind groups,
/// then issue the mesh draw.
pub type DrawPbr = (
    SetItemPipeline,
    SetMeshViewBindGroup<0>,
    SetStandardMaterialBindGroup<1>,
    SetMeshBindGroup<2>,
    DrawMesh,
);
/// Render command that binds the entity's [`StandardMaterial`] bind group
/// at bind-group index `I`.
pub struct SetStandardMaterialBindGroup<const I: usize>;
impl<const I: usize> EntityRenderCommand for SetStandardMaterialBindGroup<I> {
    type Param = (
        SRes<RenderAssets<StandardMaterial>>,
        SQuery<Read<Handle<StandardMaterial>>>,
    );
    #[inline]
    fn render<'w>(
        _view: Entity,
        item: Entity,
        (materials, handle_query): SystemParamItem<'w, '_, Self::Param>,
        pass: &mut TrackedRenderPass<'w>,
    ) -> RenderCommandResult {
        // Queued entities are guaranteed to carry a material handle.
        let handle = handle_query.get(item).unwrap();
        // The material asset itself may not be prepared yet (e.g. its
        // textures are still loading); fail this draw gracefully instead
        // of panicking on unwrap.
        match materials.into_inner().get(handle) {
            Some(material) => {
                pass.set_bind_group(I, &material.bind_group, &[]);
                RenderCommandResult::Success
            }
            None => RenderCommandResult::Failure,
        }
    }
}
| 41.017442 | 125 | 0.520057 |
f94e58d02c421c2f66c76a8cc64941df83d88069 | 69,168 | use crate::data::error_info::ErrorInfo;
use crate::data::position::Position;
use crate::data::{
ast::Interval,
literal::ContentType,
message::Message,
primitive::{
tools_crypto, tools_jwt, tools_smtp, tools_time, Data, MessageData, Primitive,
PrimitiveArray, PrimitiveBoolean, PrimitiveInt, PrimitiveNull, PrimitiveString,
PrimitiveType, Right, MSG,
},
tokens::TYPES,
Literal,
};
use crate::error_format::*;
use crate::interpreter::{
builtins::http::http_request, json_to_rust::json_to_literal,
variable_handler::match_literals::match_obj,
};
use chrono::{DateTime, SecondsFormat, TimeZone, Utc};
use lettre::Transport;
use phf::phf_map;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::{collections::HashMap, sync::mpsc};
////////////////////////////////////////////////////////////////////////////////
// DATA STRUCTURES
////////////////////////////////////////////////////////////////////////////////
// Method tables: each maps a method name callable from CSML scripts on an
// object literal of the matching content type to its implementation and
// the access right (Read/Write) it requires.
// Methods available on `http` objects.
const FUNCTIONS_HTTP: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "set" => (PrimitiveObject::set as PrimitiveMethod, Right::Read),
    "auth" => (PrimitiveObject::auth as PrimitiveMethod, Right::Read),
    "query" => (PrimitiveObject::query as PrimitiveMethod, Right::Read),
    "get" => (PrimitiveObject::get_http as PrimitiveMethod, Right::Read),
    "post" => (PrimitiveObject::post as PrimitiveMethod, Right::Read),
    "put" => (PrimitiveObject::put as PrimitiveMethod, Right::Read),
    "delete" => (PrimitiveObject::delete as PrimitiveMethod, Right::Read),
    "patch" => (PrimitiveObject::patch as PrimitiveMethod, Right::Read),
    "send" => (PrimitiveObject::send as PrimitiveMethod, Right::Read),
};
// Methods available on `smtp` objects. Note "auth" maps to `credentials`.
const FUNCTIONS_SMTP: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "auth" => (PrimitiveObject::credentials as PrimitiveMethod, Right::Read),
    "port" => (PrimitiveObject::port as PrimitiveMethod, Right::Read),
    "tls" => (PrimitiveObject::smtp_tls as PrimitiveMethod, Right::Read),
    "send" => (PrimitiveObject::smtp_send as PrimitiveMethod, Right::Read),
};
// Methods available on `time` objects.
const FUNCTIONS_TIME: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "at" => (PrimitiveObject::set_date_at as PrimitiveMethod, Right::Write),
    "unix" => (PrimitiveObject::unix as PrimitiveMethod, Right::Write),
    "format" => (PrimitiveObject::date_format as PrimitiveMethod, Right::Read),
    "parse" => (PrimitiveObject::parse_date as PrimitiveMethod, Right::Read),
};
// Methods available on `jwt` objects.
const FUNCTIONS_JWT: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "sign" => (PrimitiveObject::jwt_sign as PrimitiveMethod, Right::Read),
    "decode" => (PrimitiveObject::jwt_decode as PrimitiveMethod, Right::Read),
    "verify" => (PrimitiveObject::jwt_verity as PrimitiveMethod, Right::Read),
};
// Methods available on `crypto` objects.
const FUNCTIONS_CRYPTO: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "create_hmac" => (PrimitiveObject::create_hmac as PrimitiveMethod, Right::Read),
    "create_hash" => (PrimitiveObject::create_hash as PrimitiveMethod, Right::Read),
    "digest" => (PrimitiveObject::digest as PrimitiveMethod, Right::Read),
};
// Methods available on `base64` objects.
const FUNCTIONS_BASE64: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "encode" => (PrimitiveObject::base64_encode as PrimitiveMethod, Right::Read),
    "decode" => (PrimitiveObject::base64_decode as PrimitiveMethod, Right::Read),
};
// Methods available on `hex` objects.
const FUNCTIONS_HEX: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "encode" => (PrimitiveObject::hex_encode as PrimitiveMethod, Right::Read),
    "decode" => (PrimitiveObject::hex_decode as PrimitiveMethod, Right::Read),
};
// Methods available on `event` objects.
const FUNCTIONS_EVENT: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "get_type" => (PrimitiveObject::get_type as PrimitiveMethod, Right::Read),
    "get_content" => (PrimitiveObject::get_content as PrimitiveMethod, Right::Read),
    "is_email" => (PrimitiveObject::is_email as PrimitiveMethod, Right::Read),
    "match" => (PrimitiveObject::match_args as PrimitiveMethod, Right::Read),
    "match_array" => (PrimitiveObject::match_array as PrimitiveMethod, Right::Read),
};
// Read-only methods available on every object literal.
const FUNCTIONS_READ: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "is_number" => (PrimitiveObject::is_number as PrimitiveMethod, Right::Read),
    "is_int" => (PrimitiveObject::is_int as PrimitiveMethod, Right::Read),
    "is_float" => (PrimitiveObject::is_float as PrimitiveMethod, Right::Read),
    "type_of" => (PrimitiveObject::type_of as PrimitiveMethod, Right::Read),
    "to_string" => (PrimitiveObject::to_string as PrimitiveMethod, Right::Read),
    "contains" => (PrimitiveObject::contains as PrimitiveMethod, Right::Read),
    "is_empty" => (PrimitiveObject::is_empty as PrimitiveMethod, Right::Read),
    "length" => (PrimitiveObject::length as PrimitiveMethod, Right::Read),
    "keys" => (PrimitiveObject::keys as PrimitiveMethod, Right::Read),
    "values" => (PrimitiveObject::values as PrimitiveMethod, Right::Read),
    "get" => (PrimitiveObject::get_generics as PrimitiveMethod, Right::Read),
};
// Mutating methods available on every object literal.
const FUNCTIONS_WRITE: phf::Map<&'static str, (PrimitiveMethod, Right)> = phf_map! {
    "clear_values" => (PrimitiveObject::clear_values as PrimitiveMethod, Right::Write),
    "insert" => (PrimitiveObject::insert as PrimitiveMethod, Right::Write),
    "remove" => (PrimitiveObject::remove as PrimitiveMethod, Right::Write),
};
/// Signature shared by every built-in object method registered in the
/// tables above. `args` holds the call arguments keyed "arg0", "arg1", ...
type PrimitiveMethod = fn(
    object: &mut PrimitiveObject,
    args: &HashMap<String, Literal>,
    data: &mut Data,
    interval: Interval,
    content_type: &str,
) -> Result<Literal, ErrorInfo>;
/// Map-like CSML primitive; also backs typed object literals such as
/// `http`, `smtp` and `time` (distinguished by their content type).
#[derive(PartialEq, Debug, Clone, Serialize, Deserialize)]
pub struct PrimitiveObject {
    pub value: HashMap<String, Literal>,
}
////////////////////////////////////////////////////////////////////////////////
// METHOD FUNCTIONS
////////////////////////////////////////////////////////////////////////////////
// HTTP object methods.
impl PrimitiveObject {
    /// `set(header)` — merges the given object into the request "header"
    /// map and returns the updated `http` object.
    fn set(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "set(header: object) => http object";
        if args.len() != 1 {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }
        let literal = match args.get("arg0") {
            Some(res) => res,
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ));
            }
        };
        // Work on a copy; the result literal replaces the receiver.
        let mut object = object.to_owned();
        let header = Literal::get_value::<HashMap<String, Literal>>(
            &literal.primitive,
            &data.context.flow,
            interval,
            ERROR_HTTP_SET.to_owned(),
        )?;
        insert_to_object(header, &mut object, "header", &data.context.flow, literal);
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `auth(username, password)` — adds an HTTP Basic `Authorization`
    /// header (base64 of "username:password") to the request.
    fn auth(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "auth(username, password) => http object";
        if args.len() < 2 {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }
        let username = match args.get("arg0") {
            Some(lit) => Literal::get_value::<String>(
                &lit.primitive,
                &data.context.flow,
                lit.interval,
                format!("usage: {}", usage),
            )?,
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ));
            }
        };
        let password = match args.get("arg1") {
            Some(lit) => Literal::get_value::<String>(
                &lit.primitive,
                &data.context.flow,
                lit.interval,
                format!("usage: {}", usage),
            )?,
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ));
            }
        };
        let user_password = format!("{}:{}", username, password);
        let authorization = format!("Basic {}", base64::encode(user_password.as_bytes()));
        let mut object = object.to_owned();
        let mut header = HashMap::new();
        header.insert(
            "Authorization".to_owned(),
            PrimitiveString::get_literal(&authorization, interval),
        );
        let literal = PrimitiveObject::get_literal(&header, interval);
        insert_to_object(&header, &mut object, "header", &data.context.flow, &literal);
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `query(parameters)` — merges the given object into the request
    /// "query" (URL parameter) map.
    fn query(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "query(parameters: object) => http object";
        if args.len() != 1 {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }
        let literal = match args.get("arg0") {
            Some(res) => res,
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ));
            }
        };
        let mut object = object.to_owned();
        let header = Literal::get_value::<HashMap<String, Literal>>(
            &literal.primitive,
            &data.context.flow,
            interval,
            ERROR_HTTP_QUERY.to_owned(),
        )?;
        insert_to_object(header, &mut object, "query", &data.context.flow, literal);
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `get()` — marks the request as a GET and drops any body.
    fn get_http(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "get() => http object";
        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }
        let mut object = object.to_owned();
        object.value.insert(
            "method".to_owned(),
            PrimitiveString::get_literal("get", interval),
        );
        object.value.remove("body");
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `post([body])` — marks the request as a POST; sets the body when
    /// an argument is given, removes it otherwise.
    fn post(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        _data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        match args.get("arg0") {
            Some(body) => object.value.insert("body".to_owned(), body.to_owned()),
            _ => object.value.remove("body"),
        };
        let mut object = object.to_owned();
        object.value.insert(
            "method".to_owned(),
            PrimitiveString::get_literal("post", interval),
        );
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `put([body])` — marks the request as a PUT; body handling as in
    /// `post`.
    fn put(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        _data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        match args.get("arg0") {
            Some(body) => object.value.insert("body".to_owned(), body.to_owned()),
            _ => object.value.remove("body"),
        };
        let mut object = object.to_owned();
        object.value.insert(
            "method".to_owned(),
            PrimitiveString::get_literal("put", interval),
        );
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `delete([body])` — marks the request as a DELETE; body handling
    /// as in `post`.
    fn delete(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        _data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        match args.get("arg0") {
            Some(body) => object.value.insert("body".to_owned(), body.to_owned()),
            _ => object.value.remove("body"),
        };
        let mut object = object.to_owned();
        object.value.insert(
            "method".to_owned(),
            PrimitiveString::get_literal("delete", interval),
        );
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `patch([body])` — marks the request as a PATCH.
    /// NOTE(review): unlike post/put/delete (which remove "body" when no
    /// argument is given), patch inserts a Null body — confirm this
    /// asymmetry is intended.
    fn patch(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        _data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let body = match args.get("arg0") {
            Some(res) => res.to_owned(),
            _ => PrimitiveNull::get_literal(Interval::default()),
        };
        let mut object = object.to_owned();
        object.value.insert(
            "method".to_owned(),
            PrimitiveString::get_literal("patch", interval),
        );
        object.value.insert("body".to_owned(), body);
        let mut result = PrimitiveObject::get_literal(&object.value, interval);
        result.set_content_type("http");
        Ok(result)
    }
    /// `send()` — performs the request configured on this object using
    /// the method stored under "method" and returns the parsed response.
    fn send(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "send() => http object";
        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }
        if let Some(literal) = object.value.get("method") {
            // Map the stored method name to the matching ureq constructor.
            let function = match Literal::get_value::<String>(
                &literal.primitive,
                &data.context.flow,
                interval,
                ERROR_HTTP_UNKNOWN_METHOD.to_string(),
            ) {
                Ok(delete) if delete == "delete" => ureq::delete,
                Ok(put) if put == "put" => ureq::put,
                Ok(patch) if patch == "patch" => ureq::patch,
                Ok(post) if post == "post" => ureq::post,
                Ok(get) if get == "get" => ureq::get,
                _ => {
                    return Err(gen_error_info(
                        Position::new(interval, &data.context.flow),
                        ERROR_HTTP_UNKNOWN_METHOD.to_string(),
                    ))
                }
            };
            let value = http_request(&object.value, function, &data.context.flow, interval)?;
            return json_to_literal(&value, interval, &data.context.flow);
        }
        // No "method" key: the object was never configured via get/post/...
        Err(gen_error_info(
            Position::new(interval, &data.context.flow),
            ERROR_HTTP_SEND.to_owned(),
        ))
    }
}
impl PrimitiveObject {
fn credentials(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let usage = "credentials(username, password) => smtp object";
if args.len() < 2 {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
let username = match args.get("arg0") {
Some(lit) => Literal::get_value::<String>(
&lit.primitive,
&data.context.flow,
lit.interval,
format!("usage: {}", usage),
)?,
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
};
let password = match args.get("arg1") {
Some(lit) => Literal::get_value::<String>(
&lit.primitive,
&data.context.flow,
lit.interval,
format!("usage: {}", usage),
)?,
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
};
let mut object = object.to_owned();
object.value.insert(
"username".to_owned(),
PrimitiveString::get_literal(username, interval),
);
object.value.insert(
"password".to_owned(),
PrimitiveString::get_literal(password, interval),
);
let mut result = PrimitiveObject::get_literal(&object.value, interval);
result.set_content_type("smtp");
Ok(result)
}
fn port(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let usage = "port(port) => smtp object";
if args.len() < 1 {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
let port = match args.get("arg0") {
Some(lit) => Literal::get_value::<i64>(
&lit.primitive,
&data.context.flow,
lit.interval,
format!("usage: {}", usage),
)?,
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
};
let mut object = object.to_owned();
object.value.insert(
"port".to_owned(),
PrimitiveInt::get_literal(*port, interval),
);
let mut result = PrimitiveObject::get_literal(&object.value, interval);
result.set_content_type("smtp");
Ok(result)
}
fn smtp_tls(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let usage = "tls(BOOLEAN) => smtp object";
if args.len() < 1 {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
let tls = match args.get("arg0") {
Some(lit) => Literal::get_value::<bool>(
&lit.primitive,
&data.context.flow,
lit.interval,
format!("usage: {}", usage),
)?,
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
};
let mut object = object.to_owned();
object.value.insert(
"tls".to_owned(),
PrimitiveBoolean::get_literal(*tls, interval),
);
let mut result = PrimitiveObject::get_literal(&object.value, interval);
result.set_content_type("smtp");
Ok(result)
}
fn smtp_send(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let usage = "send(email) => smtp object";
if args.len() < 1 {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
));
}
let csml_email = match args.get("arg0") {
Some(lit) => Literal::get_value::<HashMap<String, Literal>>(
&lit.primitive,
&data.context.flow,
lit.interval,
format!("usage: {}", usage),
)?,
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
))
}
};
let email = tools_smtp::format_email(csml_email, data, interval)?;
let mailer = tools_smtp::get_mailer(&mut object.value, data, interval)?;
match mailer.send(&email) {
Ok(_) => Ok(PrimitiveBoolean::get_literal(true, interval)),
Err(e) => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("Could not send email: {:?}", e),
))
}
}
}
}
impl PrimitiveObject {
fn set_date_at(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
_data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let date = tools_time::get_date(args);
let date = Utc
.ymd(
date[0] as i32, // year
date[1] as u32, // month
date[2] as u32, // day
)
.and_hms_milli_opt(
date[3] as u32, // hour
date[4] as u32, // min
date[5] as u32, // sec
date[6] as u32, // milli
);
match date {
Some(date) => {
object.value.insert(
"milliseconds".to_owned(),
PrimitiveInt::get_literal(date.timestamp_millis(), interval),
);
let mut lit = PrimitiveObject::get_literal(&object.value, interval);
lit.set_content_type("time");
Ok(lit)
}
None => Ok(PrimitiveBoolean::get_literal(false, interval)),
}
}
fn unix(
object: &mut PrimitiveObject,
_args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let usage = "invalid value, use 'Time()' built-in to create a valid 'time' object";
match object.value.get("milliseconds") {
Some(lit) if lit.primitive.get_type() == PrimitiveType::PrimitiveInt => {
let millis = Literal::get_value::<i64>(
&lit.primitive,
&data.context.flow,
interval,
"".to_string(),
)?;
let date: DateTime<Utc> = Utc.timestamp_millis(*millis);
Ok(PrimitiveInt::get_literal(date.timestamp_millis(), interval))
}
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("{}", usage),
))
}
}
}
fn parse_date(
_object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
match args.len() {
1 => tools_time::parse_rfc3339(args, data, interval),
len if len >= 2 => tools_time::pasre_from_str(args, data, interval),
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!(
"usage: expect one ore two arguments :
Time().parse(\"2020-08-13\") or
Time().parse(\"1983-08-13 12:09:14.274\", \"%Y-%m-%d %H:%M:%S%.3f\")"
),
))
}
}
}
fn date_format(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let usage = "Time().format(format: String)";
let date: DateTime<Utc> = match object.value.get("milliseconds") {
Some(lit) if lit.primitive.get_type() == PrimitiveType::PrimitiveInt => {
let millis = Literal::get_value::<i64>(
&lit.primitive,
&data.context.flow,
interval,
"".to_string(),
)?;
Utc.timestamp_millis(*millis)
}
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
format!("usage: {}", usage),
))
}
};
let formatted_date = match args.len() {
0 => date.to_rfc3339_opts(SecondsFormat::Millis, true),
_ => {
let format_lit = match args.get("arg0") {
Some(res) => res.to_owned(),
_ => PrimitiveNull::get_literal(Interval::default()),
};
let format = Literal::get_value::<String>(
&format_lit.primitive,
&data.context.flow,
interval,
"format parameter must be of type string".to_string(),
)?;
date.format(format).to_string()
}
};
Ok(PrimitiveString::get_literal(&formatted_date, interval))
}
}
// JWT methods: sign, decode and verify tokens stored under the object's
// `"jwt"` key, backed by the `jsonwebtoken` crate.
impl PrimitiveObject {
    /// `JWT(claims).sign(algo, secret[, headers])` — encodes the object's
    /// `"jwt"` entry as a signed token.
    ///
    /// * `arg0` — algorithm name (string, required).
    /// * `arg1` — signing secret (string, required).
    /// * `arg2` — optional extra header fields merged into the JWT header.
    fn jwt_sign(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let mut headers = jsonwebtoken::Header::default();

        // arg0: required signing algorithm.
        match args.get("arg0") {
            Some(algo) if algo.primitive.get_type() == PrimitiveType::PrimitiveString => {
                headers.alg = tools_jwt::get_algorithm(algo, &data.context.flow, interval)?;
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_SIGN_ALGO.to_string(),
                ))
            }
        }

        // The claims payload is the object's own "jwt" value, serialized to JSON.
        let claims = match object.value.get("jwt") {
            Some(literal) => literal.primitive.to_json(),
            None => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_SIGN_CLAIMS.to_string(),
                ))
            }
        };

        // arg1: HMAC secret used as the encoding key.
        let key = match args.get("arg1") {
            Some(key) if key.primitive.get_type() == PrimitiveType::PrimitiveString => {
                let key = Literal::get_value::<String>(
                    &key.primitive,
                    &data.context.flow,
                    interval,
                    ERROR_JWT_SIGN_SECRET.to_string(),
                )?;
                jsonwebtoken::EncodingKey::from_secret(key.as_ref())
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_ALGO.to_string(),
                ))
            }
        };

        // arg2 (optional): extra header fields.
        if let Some(lit) = args.get("arg2") {
            tools_jwt::get_headers(lit, &data.context.flow, interval, &mut headers)?;
        }

        match jsonwebtoken::encode(&headers, &claims, &key) {
            Ok(value) => Ok(PrimitiveString::get_literal(&value, interval)),
            Err(e) => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("Invalid JWT encode {:?}", e.kind()),
                ))
            }
        }
    }

    /// `JWT(token).decode(algo, secret)` — decodes and validates the token
    /// stored under `"jwt"`, returning its header and claims as a literal.
    fn jwt_decode(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        // The raw token string lives under the object's "jwt" key.
        let token = match object.value.get("jwt") {
            Some(literal) => Literal::get_value::<String>(
                &literal.primitive,
                &data.context.flow,
                interval,
                ERROR_JWT_TOKEN.to_owned(),
            )?,
            None => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_TOKEN.to_string(),
                ))
            }
        };

        // arg0: expected algorithm (required string).
        let algo = match args.get("arg0") {
            Some(algo) if algo.primitive.get_type() == PrimitiveType::PrimitiveString => {
                tools_jwt::get_algorithm(algo, &data.context.flow, interval)?
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_DECODE_ALGO.to_string(),
                ))
            }
        };

        // arg1: HMAC secret used as the decoding key.
        let key = match args.get("arg1") {
            Some(key) if key.primitive.get_type() == PrimitiveType::PrimitiveString => {
                let key = Literal::get_value::<String>(
                    &key.primitive,
                    &data.context.flow,
                    interval,
                    ERROR_JWT_DECODE_SECRET.to_owned(),
                )?;
                jsonwebtoken::DecodingKey::from_secret(key.as_ref())
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_DECODE_SECRET.to_string(),
                ))
            }
        };

        match jsonwebtoken::decode::<serde_json::Value>(
            token,
            &key,
            &jsonwebtoken::Validation::new(algo),
        ) {
            Ok(token_message) => {
                tools_jwt::token_data_to_literal(token_message, &data.context.flow, interval)
            }
            Err(e) => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("Invalid JWT decode {:?}", e.kind()),
                ))
            }
        }
    }

    /// `JWT(token).verify(claims, algo, secret)` — decodes the token while
    /// checking caller-supplied validation claims (exp, iss, ...).
    ///
    /// NOTE(review): the method name `jwt_verity` looks like a typo for
    /// `jwt_verify`, but it is referenced by name from the function-dispatch
    /// tables, so it is left unchanged here.
    fn jwt_verity(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let mut validation = jsonwebtoken::Validation::default();

        let token = match object.value.get("jwt") {
            Some(literal) => Literal::get_value::<String>(
                &literal.primitive,
                &data.context.flow,
                interval,
                ERROR_JWT_TOKEN.to_owned(),
            )?,
            None => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_TOKEN.to_string(),
                ))
            }
        };

        // arg0: required validation claims, copied into `validation`.
        match args.get("arg0") {
            Some(lit) => {
                tools_jwt::get_validation(lit, &data.context.flow, interval, &mut validation)?
            }
            None => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_VALIDATION_CLAIMS.to_string(),
                ))
            }
        }

        // arg1: single allowed algorithm.
        match args.get("arg1") {
            Some(algo) if algo.primitive.get_type() == PrimitiveType::PrimitiveString => {
                validation.algorithms = vec![tools_jwt::get_algorithm(
                    algo,
                    &data.context.flow,
                    interval,
                )?];
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_VALIDATION_ALGO.to_string(),
                ))
            }
        };

        // arg2: HMAC secret.
        let key = match args.get("arg2") {
            Some(key) if key.primitive.get_type() == PrimitiveType::PrimitiveString => {
                let key = Literal::get_value::<String>(
                    &key.primitive,
                    &data.context.flow,
                    interval,
                    ERROR_JWT_SECRET.to_owned(),
                )?;
                jsonwebtoken::DecodingKey::from_secret(key.as_ref())
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_JWT_VALIDATION_SECRETE.to_string(),
                ))
            }
        };

        match jsonwebtoken::decode::<serde_json::Value>(token, &key, &validation) {
            Ok(token_message) => {
                tools_jwt::token_data_to_literal(token_message, &data.context.flow, interval)
            }
            Err(e) => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("Invalid JWT verify {:?}", e.kind()),
                ))
            }
        }
    }
}
impl PrimitiveObject {
fn create_hmac(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let flow_name = &data.context.flow;
let data = match object.value.get("value") {
Some(literal) => Literal::get_value::<String>(
&literal.primitive,
flow_name,
interval,
ERROR_HASH.to_owned(),
)?,
None => {
return Err(gen_error_info(
Position::new(interval, flow_name),
ERROR_HASH.to_string(),
))
}
};
let algo = match args.get("arg0") {
Some(algo) if algo.primitive.get_type() == PrimitiveType::PrimitiveString => {
let algo = Literal::get_value::<String>(
&algo.primitive,
flow_name,
interval,
ERROR_HASH_ALGO.to_owned(),
)?;
tools_crypto::get_hash_algorithm(algo, flow_name, interval)?
}
_ => {
return Err(gen_error_info(
Position::new(interval, flow_name),
ERROR_HASH_ALGO.to_string(),
))
}
};
let key = match args.get("arg1") {
Some(algo) if algo.primitive.get_type() == PrimitiveType::PrimitiveString => {
let secret = Literal::get_value::<String>(
&algo.primitive,
flow_name,
interval,
ERROR_HMAC_KEY.to_owned(),
)?;
openssl::pkey::PKey::hmac(secret.as_bytes()).unwrap()
}
_ => {
return Err(gen_error_info(
Position::new(interval, flow_name),
ERROR_HMAC_KEY.to_string(),
))
}
};
let sign = openssl::sign::Signer::new(algo, &key);
match sign {
Ok(mut signer) => {
signer.update(data.as_bytes()).unwrap();
let vec = signer
.sign_to_vec()
.unwrap()
.iter()
.map(|val| PrimitiveInt::get_literal(val.clone() as i64, interval))
.collect::<Vec<Literal>>();
let mut map = HashMap::new();
map.insert(
"hash".to_string(),
PrimitiveArray::get_literal(&vec, interval),
);
let mut lit = PrimitiveObject::get_literal(&map, interval);
lit.set_content_type("crypto");
Ok(lit)
}
Err(e) => {
return Err(gen_error_info(
Position::new(interval, flow_name),
format!("{}", e),
))
}
}
}
fn create_hash(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let flow_name = &data.context.flow;
let data = match object.value.get("value") {
Some(literal) => Literal::get_value::<String>(
&literal.primitive,
&data.context.flow,
interval,
ERROR_HASH.to_owned(),
)?,
None => {
return Err(gen_error_info(
Position::new(interval, flow_name),
ERROR_HASH.to_string(),
))
}
};
let algo = match args.get("arg0") {
Some(algo) if algo.primitive.get_type() == PrimitiveType::PrimitiveString => {
let algo = Literal::get_value::<String>(
&algo.primitive,
flow_name,
interval,
ERROR_HASH_ALGO.to_owned(),
)?;
tools_crypto::get_hash_algorithm(algo, flow_name, interval)?
}
_ => {
return Err(gen_error_info(
Position::new(interval, flow_name),
ERROR_HASH_ALGO.to_string(),
))
}
};
match openssl::hash::hash(algo, data.as_bytes()) {
Ok(digest_bytes) => {
let vec = digest_bytes
.to_vec()
.iter()
.map(|val| PrimitiveInt::get_literal(*val as i64, interval))
.collect::<Vec<Literal>>();
let mut map = HashMap::new();
map.insert(
"hash".to_string(),
PrimitiveArray::get_literal(&vec, interval),
);
let mut lit = PrimitiveObject::get_literal(&map, interval);
lit.set_content_type("crypto");
Ok(lit)
}
Err(e) => {
return Err(gen_error_info(
Position::new(interval, flow_name),
format!("{}", e),
))
}
}
}
fn digest(
object: &mut PrimitiveObject,
args: &HashMap<String, Literal>,
data: &mut Data,
interval: Interval,
_content_type: &str,
) -> Result<Literal, ErrorInfo> {
let vec = match object.value.get("hash") {
Some(literal) => Literal::get_value::<Vec<Literal>>(
&literal.primitive,
&data.context.flow,
interval,
ERROR_DIGEST.to_owned(),
)?,
None => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
ERROR_DIGEST.to_string(),
))
}
};
let algo = match args.get("arg0") {
Some(algo) if algo.primitive.get_type() == PrimitiveType::PrimitiveString => {
Literal::get_value::<String>(
&algo.primitive,
&data.context.flow,
interval,
ERROR_DIGEST_ALGO.to_owned(),
)?
}
_ => {
return Err(gen_error_info(
Position::new(interval, &data.context.flow),
ERROR_DIGEST_ALGO.to_string(),
))
}
};
let mut digest_data = vec![];
for value in vec.iter() {
digest_data.push(*Literal::get_value::<i64>(
&value.primitive,
&data.context.flow,
interval,
"ERROR_hash_TOKEN".to_owned(),
)? as u8);
}
let value = tools_crypto::digest_data(algo, &digest_data, &data.context.flow, interval)?;
Ok(PrimitiveString::get_literal(&value, interval))
}
}
// Base64 methods: encode/decode the string stored under the object's
// `"string"` key. Fix: error message grammar "can't be decode" →
// "can't be decoded".
impl PrimitiveObject {
    /// `Base64("...").encode()` — returns the Base64 encoding of the stored string.
    fn base64_encode(
        object: &mut PrimitiveObject,
        _args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "Base64(\"...\").encode() => String";

        let string = match object.value.get("string") {
            Some(lit) => lit.primitive.to_string(),
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ))
            }
        };

        let result = base64::encode(string.as_bytes());
        Ok(PrimitiveString::get_literal(&result, interval))
    }

    /// `Base64("...").decode()` — decodes the stored string from Base64.
    /// Invalid input yields an error; invalid UTF-8 in the decoded bytes is
    /// replaced lossily rather than rejected.
    fn base64_decode(
        object: &mut PrimitiveObject,
        _args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "Base64(\"...\").decode() => String";

        let string = match object.value.get("string") {
            Some(lit) => lit.primitive.to_string(),
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ))
            }
        };

        let result = match base64::decode(string.as_bytes()) {
            Ok(buf) => format!("{}", String::from_utf8_lossy(&buf)),
            Err(_) => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("Base64 invalid value: {}, can't be decoded", string),
                ))
            }
        };

        Ok(PrimitiveString::get_literal(&result, interval))
    }
}
// Hex methods: encode/decode the string stored under the object's
// `"string"` key. Fix: error message grammar "can't be decode" →
// "can't be decoded".
impl PrimitiveObject {
    /// `Hex("...").encode()` — returns the lowercase hex encoding of the stored string.
    fn hex_encode(
        object: &mut PrimitiveObject,
        _args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "Hex(\"...\").encode() => String";

        let string = match object.value.get("string") {
            Some(lit) => lit.primitive.to_string(),
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ))
            }
        };

        let result = hex::encode(string.as_bytes());
        Ok(PrimitiveString::get_literal(&result, interval))
    }

    /// `Hex("...").decode()` — decodes the stored string from hex. Invalid
    /// input yields an error; invalid UTF-8 in the decoded bytes is replaced
    /// lossily rather than rejected.
    fn hex_decode(
        object: &mut PrimitiveObject,
        _args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "Hex(\"...\").decode() => String";

        let string = match object.value.get("string") {
            Some(lit) => lit.primitive.to_string(),
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ))
            }
        };

        let result = match hex::decode(string.as_bytes()) {
            Ok(buf) => format!("{}", String::from_utf8_lossy(&buf)),
            Err(_) => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("Hex invalid value: {}, can't be decoded", string),
                ))
            }
        };

        Ok(PrimitiveString::get_literal(&result, interval))
    }
}
// Event-object methods: type/content introspection and matching helpers for
// event payloads (objects carrying "text"/"payload" entries).
impl PrimitiveObject {
    /// `get_type()` — returns the object's content type as a string.
    fn get_type(
        _object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "get_type() => string";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        Ok(PrimitiveString::get_literal(content_type, interval))
    }

    /// `get_content()` — returns a clone of the object itself, re-wrapped in
    /// a literal carrying the current content type.
    fn get_content(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "get_content() => object";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        Ok(Literal {
            content_type: content_type.to_owned(),
            primitive: Box::new(object.clone()),
            interval,
        })
    }

    /// `is_email()` — true when the object's `"text"` entry is a string that
    /// matches a basic email regex; false (not an error) when the entry is
    /// missing or not a string.
    fn is_email(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "is_email() => boolean";

        let text = match object.value.get("text") {
            Some(lit) if lit.content_type == "string" => lit.primitive.to_string(),
            _ => return Ok(PrimitiveBoolean::get_literal(false, interval)),
        };

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        // NOTE(review): this regex is recompiled on every call; hoisting it
        // into a lazily-initialized static would avoid the repeated cost.
        // The unwrap is safe: the pattern is a compile-time constant.
        let email_regex = Regex::new(
            r"^([a-zA-Z0-9_+]([a-zA-Z0-9_+.]*[a-zA-Z0-9_+])?)@([a-zA-Z0-9]+([\-\.]{1}[a-zA-Z0-9]+)*\.[a-zA-Z]{2,6})",
        )
        .unwrap();

        let lit = PrimitiveBoolean::get_literal(email_regex.is_match(&text), interval);
        Ok(lit)
    }

    /// `match(a, b, ...)` — returns the first argument that matches the
    /// object's `"text"` or `"payload"` string (via `match_obj`), or Null.
    fn match_args(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "match(a) => a";

        let lit = match (object.value.get("text"), object.value.get("payload")) {
            (Some(lit), _) | (_, Some(lit)) if lit.content_type == "string" => lit,
            _ => return Ok(PrimitiveNull::get_literal(interval)),
        };

        if args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        // NOTE(review): HashMap iteration order is unspecified, so with
        // multiple matching arguments the one returned is arbitrary.
        let is_match = args.iter().find(|(_name, arg)| match_obj(lit, arg));
        match is_match {
            Some((_, lit)) => Ok(lit.to_owned()),
            None => Ok(PrimitiveNull::get_literal(interval)),
        }
    }

    /// `match_array([a, b, c])` — like `match`, but the candidates come from
    /// a single array argument and are tried in array order.
    fn match_array(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "match_array([a,b,c]) => a";

        let lit = match (object.value.get("text"), object.value.get("payload")) {
            (Some(lit), _) | (_, Some(lit)) if lit.content_type == "string" => lit,
            _ => return Ok(PrimitiveNull::get_literal(interval)),
        };

        let array = match args.get("arg0") {
            Some(lit) => Literal::get_value::<Vec<Literal>>(
                &lit.primitive,
                &data.context.flow,
                interval,
                format!("expect Array value as argument usage: {}", usage),
            )?,
            None => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("expect Array value as argument usage: {}", usage),
                ))
            }
        };

        let is_match = array.iter().find(|&arg| match_obj(lit, arg));
        match is_match {
            Some(lit) => Ok(lit.to_owned()),
            None => Ok(PrimitiveNull::get_literal(interval)),
        }
    }
}
// Generic methods available on every object literal: type predicates,
// string conversion, and map-style accessors.
impl PrimitiveObject {
    /// `is_number()` — always false: an object is never a number.
    fn is_number(
        _object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "is_number() => boolean";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        Ok(PrimitiveBoolean::get_literal(false, interval))
    }

    /// `is_int()` — always false: an object is never an int.
    fn is_int(
        _object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "is_int() => boolean";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        Ok(PrimitiveBoolean::get_literal(false, interval))
    }

    /// `is_float()` — always false: an object is never a float.
    fn is_float(
        _object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "is_float() => boolean";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        Ok(PrimitiveBoolean::get_literal(false, interval))
    }

    /// `type_of()` — returns the constant string "object".
    fn type_of(
        _object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "type_of() => string";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        Ok(PrimitiveString::get_literal("object", interval))
    }

    /// `to_string()` — renders the object as its JSON string form.
    fn to_string(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "to_string() => string";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        Ok(PrimitiveString::get_literal(&object.to_string(), interval))
    }

    /// `contains(key)` — true if the object has an entry named `key`.
    fn contains(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "contains(key: string) => boolean";

        if args.len() != 1 {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        let key = match args.get("arg0") {
            Some(res) if res.primitive.get_type() == PrimitiveType::PrimitiveString => {
                Literal::get_value::<String>(
                    &res.primitive,
                    &data.context.flow,
                    interval,
                    ERROR_OBJECT_CONTAINS.to_owned(),
                )?
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_OBJECT_CONTAINS.to_owned(),
                ));
            }
        };

        let result = object.value.contains_key(key);
        Ok(PrimitiveBoolean::get_literal(result, interval))
    }

    /// `is_empty()` — true if the object has no entries.
    fn is_empty(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "is_empty() => boolean";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        let result = object.value.is_empty();
        Ok(PrimitiveBoolean::get_literal(result, interval))
    }

    /// `length()` — number of entries in the object.
    fn length(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "length() => int";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        let result = object.value.len();
        Ok(PrimitiveInt::get_literal(result as i64, interval))
    }

    /// `keys()` — the object's keys as an array of strings.
    /// NOTE(review): HashMap iteration order is unspecified, so the order of
    /// the returned array is not stable between runs.
    fn keys(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "keys() => array";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        let mut result = Vec::new();
        for key in object.value.keys() {
            result.push(PrimitiveString::get_literal(key, interval));
        }

        Ok(PrimitiveArray::get_literal(&result, interval))
    }

    /// `values()` — the object's values as an array (same ordering caveat as `keys`).
    fn values(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "values() => array";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        let mut result = Vec::new();
        for value in object.value.values() {
            result.push(value.to_owned());
        }

        Ok(PrimitiveArray::get_literal(&result, interval))
    }

    /// `get(key)` — the value stored under `key`, or Null when absent.
    fn get_generics(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "get(key: string) => primitive";

        if args.len() != 1 {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        let key = match args.get("arg0") {
            Some(res) if res.primitive.get_type() == PrimitiveType::PrimitiveString => {
                Literal::get_value::<String>(
                    &res.primitive,
                    &data.context.flow,
                    interval,
                    ERROR_OBJECT_GET_GENERICS.to_owned(),
                )?
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_OBJECT_GET_GENERICS.to_owned(),
                ));
            }
        };

        match object.value.get(key) {
            Some(res) => Ok(res.to_owned()),
            None => Ok(PrimitiveNull::get_literal(interval)),
        }
    }
}
// Mutating methods: clear, insert and remove entries of the object.
impl PrimitiveObject {
    /// `clear_values()` — replaces every value in the object with Null,
    /// keeping the keys. Returns Null.
    ///
    /// Rewritten to mutate values in place via `values_mut()` instead of
    /// collecting all keys into a temporary Vec and re-inserting each entry;
    /// the observable result is identical.
    fn clear_values(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "clear_values() => null";

        if !args.is_empty() {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        for value in object.value.values_mut() {
            *value = PrimitiveNull::get_literal(interval);
        }

        Ok(PrimitiveNull::get_literal(interval))
    }

    /// `insert(key, value)` — inserts or overwrites the entry `key`. Returns Null.
    fn insert(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "insert(key: string, value: primitive) => null";

        if args.len() != 2 {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        // arg0: key must be a string.
        let key = match args.get("arg0") {
            Some(res) if res.primitive.get_type() == PrimitiveType::PrimitiveString => {
                Literal::get_value::<String>(
                    &res.primitive,
                    &data.context.flow,
                    interval,
                    ERROR_OBJECT_INSERT.to_owned(),
                )?
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_OBJECT_INSERT.to_owned(),
                ));
            }
        };

        // arg1: any literal value.
        let value = match args.get("arg1") {
            Some(res) => res,
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    format!("usage: {}", usage),
                ));
            }
        };

        object.value.insert(key.to_owned(), value.to_owned());
        Ok(PrimitiveNull::get_literal(interval))
    }

    /// `remove(key)` — removes and returns the entry `key`, or Null when absent.
    fn remove(
        object: &mut PrimitiveObject,
        args: &HashMap<String, Literal>,
        data: &mut Data,
        interval: Interval,
        _content_type: &str,
    ) -> Result<Literal, ErrorInfo> {
        let usage = "remove(key: string) => primitive";

        if args.len() != 1 {
            return Err(gen_error_info(
                Position::new(interval, &data.context.flow),
                format!("usage: {}", usage),
            ));
        }

        let key = match args.get("arg0") {
            Some(res) if res.primitive.get_type() == PrimitiveType::PrimitiveString => {
                Literal::get_value::<String>(
                    &res.primitive,
                    &data.context.flow,
                    interval,
                    ERROR_OBJECT_REMOVE.to_owned(),
                )?
            }
            _ => {
                return Err(gen_error_info(
                    Position::new(interval, &data.context.flow),
                    ERROR_OBJECT_REMOVE.to_owned(),
                ));
            }
        };

        match object.value.remove(key) {
            Some(value) => Ok(value),
            None => Ok(PrimitiveNull::get_literal(interval)),
        }
    }
}
////////////////////////////////////////////////////////////////////////////////
// PRIVATE FUNCTION
////////////////////////////////////////////////////////////////////////////////
/// Merges `src` into the entry `key_name` of `dst`.
///
/// If `dst` already has an object under `key_name`, every key/value pair of
/// `src` is copied into it (overwriting duplicates); non-object existing
/// values are left untouched. If the key is absent, a copy of `literal` is
/// inserted instead.
fn insert_to_object(
    src: &HashMap<String, Literal>,
    dst: &mut PrimitiveObject,
    key_name: &str,
    flow_name: &str,
    literal: &Literal,
) {
    match dst.value.get_mut(key_name) {
        Some(existing) => {
            // Only merge when the existing value really is an object map;
            // otherwise leave it as-is (mirrors the original and_modify).
            if let Ok(map) = Literal::get_mut_value::<HashMap<String, Literal>>(
                &mut existing.primitive,
                flow_name,
                literal.interval,
                ERROR_UNREACHABLE.to_owned(),
            ) {
                for (key, value) in src.iter() {
                    map.insert(key.to_owned(), value.to_owned());
                }
            }
        }
        None => {
            dst.value.insert(key_name.to_owned(), literal.to_owned());
        }
    }
}
////////////////////////////////////////////////////////////////////////////////
// PUBLIC FUNCTIONS
////////////////////////////////////////////////////////////////////////////////
impl PrimitiveObject {
    /// Builds a `PrimitiveObject` owning a copy of `value`.
    pub fn new(value: &HashMap<String, Literal>) -> Self {
        Self {
            value: value.clone(),
        }
    }

    /// Wraps `object` in a `Literal` with content type `"object"`.
    pub fn get_literal(object: &HashMap<String, Literal>, interval: Interval) -> Literal {
        Literal {
            content_type: "object".to_owned(),
            primitive: Box::new(Self::new(object)),
            interval,
        }
    }
}
////////////////////////////////////////////////////////////////////////////////
// TRAIT FUNCTIONS
////////////////////////////////////////////////////////////////////////////////
// Trait implementation wiring PrimitiveObject into the dynamic primitive
// system (equality, arithmetic rejection, JSON/memory serialization, and
// method dispatch via do_exec).
#[typetag::serde]
impl Primitive for PrimitiveObject {
    /// Structural equality: true only when `other` is also an object with an
    /// equal key/value map.
    fn is_eq(&self, other: &dyn Primitive) -> bool {
        if let Some(other) = other.as_any().downcast_ref::<Self>() {
            return self.value == other.value;
        }

        false
    }

    /// Objects are unordered: comparison is never defined.
    fn is_cmp(&self, _other: &dyn Primitive) -> Option<Ordering> {
        None
    }

    // Arithmetic is illegal on objects; every operator reports the same
    // kind of error with the two operand types.
    fn do_add(&self, other: &dyn Primitive) -> Result<Box<dyn Primitive>, String> {
        Err(format!(
            "{} {:?} + {:?}",
            ERROR_ILLEGAL_OPERATION,
            self.get_type(),
            other.get_type()
        ))
    }

    fn do_sub(&self, other: &dyn Primitive) -> Result<Box<dyn Primitive>, String> {
        Err(format!(
            "{} {:?} - {:?}",
            ERROR_ILLEGAL_OPERATION,
            self.get_type(),
            other.get_type()
        ))
    }

    fn do_div(&self, other: &dyn Primitive) -> Result<Box<dyn Primitive>, String> {
        Err(format!(
            "{} {:?} / {:?}",
            ERROR_ILLEGAL_OPERATION,
            self.get_type(),
            other.get_type()
        ))
    }

    fn do_mul(&self, other: &dyn Primitive) -> Result<Box<dyn Primitive>, String> {
        Err(format!(
            "{} {:?} * {:?}",
            ERROR_ILLEGAL_OPERATION,
            self.get_type(),
            other.get_type()
        ))
    }

    fn do_rem(&self, other: &dyn Primitive) -> Result<Box<dyn Primitive>, String> {
        Err(format!(
            "{} {:?} % {:?}",
            ERROR_ILLEGAL_OPERATION,
            self.get_type(),
            other.get_type()
        ))
    }

    fn as_debug(&self) -> &dyn std::fmt::Debug {
        self
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }

    fn get_type(&self) -> PrimitiveType {
        PrimitiveType::PrimitiveObject
    }

    fn as_box_clone(&self) -> Box<dyn Primitive> {
        Box::new((*self).clone())
    }

    /// Serializes to JSON. Values whose content type is not one of the
    /// builtin TYPES are wrapped as {"content_type": ..., "content": ...}
    /// so the custom type survives the round-trip.
    fn to_json(&self) -> serde_json::Value {
        let mut object: serde_json::map::Map<String, serde_json::Value> =
            serde_json::map::Map::new();

        for (key, literal) in self.value.iter() {
            if !TYPES.contains(&&(*literal.content_type)) {
                let mut map = serde_json::Map::new();
                map.insert(
                    "content_type".to_owned(),
                    serde_json::json!(literal.content_type),
                );
                map.insert("content".to_owned(), literal.primitive.to_json());
                object.insert(key.to_owned(), serde_json::json!(map));
            } else {
                object.insert(key.to_owned(), literal.primitive.to_json());
            }
        }

        serde_json::Value::Object(object)
    }

    /// Serializes for the memory store. Plain nested objects are emitted
    /// inline; the outermost value (or any custom content type) is wrapped
    /// with explicit `_content_type` / `_content` fields.
    fn format_mem(&self, content_type: &str, first: bool) -> serde_json::Value {
        let mut object: serde_json::map::Map<String, serde_json::Value> =
            serde_json::map::Map::new();

        match (content_type, first) {
            (content_type, false) if content_type == "object" => {
                for (key, literal) in self.value.iter() {
                    let content_type = &literal.content_type;
                    object.insert(
                        key.to_owned(),
                        literal.primitive.format_mem(content_type, false),
                    );
                }
                serde_json::Value::Object(object)
            }
            (content_type, _) => {
                let mut map: serde_json::Map<String, serde_json::Value> = serde_json::Map::new();
                map.insert("_content_type".to_owned(), serde_json::json!(content_type));
                for (key, literal) in self.value.iter() {
                    let content_type = &literal.content_type;
                    object.insert(
                        key.to_owned(),
                        literal.primitive.format_mem(content_type, false),
                    );
                }
                map.insert("_content".to_owned(), serde_json::Value::Object(object));
                serde_json::Value::Object(map)
            }
        }
    }

    /// String form is the JSON rendering.
    fn to_string(&self) -> String {
        self.to_json().to_string()
    }

    /// Objects are always truthy.
    fn as_bool(&self) -> bool {
        true
    }

    fn get_value(&self) -> &dyn std::any::Any {
        &self.value
    }

    fn get_mut_value(&mut self) -> &mut dyn std::any::Any {
        &mut self.value
    }

    fn to_msg(&self, content_type: String) -> Message {
        Message {
            content_type,
            content: self.to_json(),
        }
    }

    /// Dispatches a method call on this object.
    ///
    /// Selects the function table(s) matching the literal's content type,
    /// looks up `name`, and runs it. For event objects, if no event method
    /// matches, the call falls through to the "text"/"payload" entry so that
    /// string methods work directly on events.
    fn do_exec(
        &mut self,
        name: &str,
        args: &HashMap<String, Literal>,
        interval: Interval,
        content_type: &ContentType,
        data: &mut Data,
        msg_data: &mut MessageData,
        sender: &Option<mpsc::Sender<MSG>>,
    ) -> Result<(Literal, Right), ErrorInfo> {
        // Candidate method tables per content type.
        let event = vec![FUNCTIONS_EVENT];
        let http = vec![FUNCTIONS_HTTP, FUNCTIONS_READ, FUNCTIONS_WRITE];
        let smtp = vec![FUNCTIONS_SMTP];
        let base64 = vec![FUNCTIONS_BASE64];
        let hex = vec![FUNCTIONS_HEX];
        let jwt = vec![FUNCTIONS_JWT];
        let crypto = vec![FUNCTIONS_CRYPTO];
        let time = vec![FUNCTIONS_TIME];
        let generics = vec![FUNCTIONS_READ, FUNCTIONS_WRITE];

        let mut is_event = false;

        // `content_type` is rebound here from enum to &str.
        let (content_type, vector) = match content_type {
            ContentType::Event(event_type) => {
                is_event = true;
                (event_type.as_ref(), event)
            }
            ContentType::Http => ("", http),
            ContentType::Smtp => ("", smtp),
            ContentType::Base64 => ("", base64),
            ContentType::Hex => ("", hex),
            ContentType::Jwt => ("", jwt),
            ContentType::Crypto => ("", crypto),
            ContentType::Time => ("", time),
            ContentType::Primitive => ("", generics),
        };

        // First matching table entry wins; returns the method's result and
        // its access Right.
        for function in vector.iter() {
            if let Some((f, right)) = function.get(name) {
                let result = f(self, args, data, interval, &content_type)?;

                return Ok((result, *right));
            }
        }

        // Event fallback: delegate the call to the inner "text" or "payload"
        // primitive (in that priority order).
        if is_event {
            let vec = ["text", "payload"];
            for value in vec.iter() {
                if let Some(res) = self.value.get_mut(*value) {
                    return res.primitive.do_exec(
                        name,
                        args,
                        interval,
                        &ContentType::Primitive,
                        data,
                        msg_data,
                        sender,
                    );
                }
            }
        }

        Err(gen_error_info(
            Position::new(interval, &data.context.flow),
            format!("[{}] {}", name, ERROR_OBJECT_UNKNOWN_METHOD),
        ))
    }
}
| 31.918782 | 117 | 0.49468 |
757fec3993759d72dfb9b5041fb7d81d071b3616 | 1,474 |
/// Yew component rendering the Material Design "brightness_2" icon as an
/// inline SVG. Appearance (size, colors, stroke) is driven by the shared
/// `crate::Props`.
pub struct IconBrightness2 {
    // Last props received from the parent; read in `view`.
    props: crate::Props,
}
impl yew::Component for IconBrightness2 {
    type Properties = crate::Props;
    type Message = ();

    /// Stores the incoming props; the icon keeps no other state.
    fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
    {
        Self { props }
    }

    /// No messages are ever sent (Message = ()); returning true is inert.
    fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
    {
        true
    }

    /// NOTE(review): always returns false and discards the new props, so the
    /// icon never re-renders when its props change — typical for generated
    /// icon components, but confirm this is intended.
    fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
    {
        false
    }

    /// Renders an outer <svg> configured from props, containing the
    /// hard-coded Material Design path for "brightness_2".
    fn view(&self) -> yew::prelude::Html
    {
        yew::prelude::html! {
            <svg
                class=self.props.class.unwrap_or("")
                width=self.props.size.unwrap_or(24).to_string()
                height=self.props.size.unwrap_or(24).to_string()
                viewBox="0 0 24 24"
                fill=self.props.fill.unwrap_or("none")
                stroke=self.props.color.unwrap_or("currentColor")
                stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
                stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
                stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
            >
            <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M10 2c-1.82 0-3.53.5-5 1.35C7.99 5.08 10 8.3 10 12s-2.01 6.92-5 8.65C6.47 21.5 8.18 22 10 22c5.52 0 10-4.48 10-10S15.52 2 10 2z"/></svg>
            </svg>
        }
    }
}
| 32.043478 | 277 | 0.571913 |
797c70bc74614c1c4f89df3996c289b6fcebb0de | 6,056 | /// This module parses a raw `tt::TokenStream` into macro-by-example token
/// stream. This is a *mostly* identify function, expect for handling of
/// `$var:tt_kind` and `$(repeat),*` constructs.
use crate::ParseError;
use crate::tt_cursor::TtCursor;
/// Parses the body of a `macro_rules!` invocation into a set of rules.
///
/// Rules are separated by `;`; a missing separator is only tolerated after
/// the final rule (i.e. at end of input).
pub(crate) fn parse(tt: &tt::Subtree) -> Result<crate::MacroRules, ParseError> {
    let mut parser = TtCursor::new(tt);
    let mut rules = Vec::new();
    loop {
        if parser.is_eof() {
            break;
        }
        rules.push(parse_rule(&mut parser)?);
        match parser.expect_char(';') {
            Ok(_) => {}
            // A trailing rule without `;` is fine.
            Err(_) if parser.is_eof() => break,
            Err(e) => return Err(e),
        }
    }
    Ok(crate::MacroRules { rules })
}
fn parse_rule(p: &mut TtCursor) -> Result<crate::Rule, ParseError> {
let lhs = parse_subtree(p.eat_subtree()?, false)?;
p.expect_char('=')?;
p.expect_char('>')?;
let mut rhs = parse_subtree(p.eat_subtree()?, true)?;
rhs.delimiter = crate::Delimiter::None;
Ok(crate::Rule { lhs, rhs })
}
/// True when the token is the literal `true` or `false` (needed because the
/// lexer classifies those keywords as literals, yet `$true`/`$false` must be
/// accepted as macro variable names).
fn is_boolean_literal(lit: Option<&tt::TokenTree>) -> bool {
    lit.map_or(false, |tree| match tree {
        tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => lit.text == "true" || lit.text == "false",
        _ => false,
    })
}
/// Converts a raw `tt::Subtree` into the crate's own subtree representation,
/// interpreting `$name`, `$name:kind` and `$(...)sep*` along the way.
/// `transcriber` is true for the rhs of a rule, where `:kind` annotations are
/// not parsed.
fn parse_subtree(tt: &tt::Subtree, transcriber: bool) -> Result<crate::Subtree, ParseError> {
    let mut token_trees = Vec::new();
    let mut p = TtCursor::new(tt);
    while let Some(tt) = p.eat() {
        let child: crate::TokenTree = match tt {
            tt::TokenTree::Leaf(leaf) => match leaf {
                // `$` introduces either a variable or a repeat group,
                // depending on what follows.
                tt::Leaf::Punct(tt::Punct { char: '$', spacing }) => {
                    // mbe var can be an ident or keyword, including `true` and `false`
                    if p.at_ident().is_some() || is_boolean_literal(p.current()) {
                        crate::Leaf::from(parse_var(&mut p, transcriber)?).into()
                    } else if let Some(tt::TokenTree::Subtree(_)) = p.current() {
                        // `$( ... )` — a repetition group.
                        parse_repeat(&mut p, transcriber)?.into()
                    } else {
                        // Treat it as normal punct
                        crate::Leaf::from(tt::Punct { char: '$', spacing: *spacing }).into()
                    }
                }
                tt::Leaf::Punct(punct) => crate::Leaf::from(*punct).into(),
                tt::Leaf::Ident(tt::Ident { text, id: _ }) => {
                    crate::Leaf::from(crate::Ident { text: text.clone() }).into()
                }
                tt::Leaf::Literal(tt::Literal { text }) => {
                    crate::Leaf::from(crate::Literal { text: text.clone() }).into()
                }
            },
            // Nested groups are converted recursively.
            tt::TokenTree::Subtree(subtree) => parse_subtree(&subtree, transcriber)?.into(),
        };
        token_trees.push(child);
    }
    Ok(crate::Subtree { token_trees, delimiter: tt.delimiter })
}
/// Parses the variable that follows a `$`: its name (an ident, keyword, or
/// the literals `true`/`false`) and, in matcher position only, an optional
/// `:kind` fragment specifier.
///
/// The caller (`parse_subtree`) has already verified that an ident or
/// boolean literal is next, so the `unwrap`s below cannot fail.
fn parse_var(p: &mut TtCursor, transcriber: bool) -> Result<crate::Var, ParseError> {
    let text = {
        if is_boolean_literal(p.current()) {
            let lit = p.eat_literal().unwrap();
            lit.text.clone()
        } else {
            let ident = p.eat_ident().unwrap();
            ident.text.clone()
        }
    };
    // A `:kind` annotation is only parsed in the matcher (lhs).
    let kind = if !transcriber && p.at_char(':') {
        p.bump();
        if let Some(ident) = p.eat_ident() {
            Some(ident.text.clone())
        } else {
            // `:` not followed by an ident -- back the cursor up and treat
            // the variable as having no fragment kind.
            p.rev_bump();
            None
        }
    } else {
        None
    };
    Ok(crate::Var { text, kind })
}
/// Builds a `Repeat` node from a repetition-operator character.
/// Errors unless `rep` is one of `*`, `+`, `?`.
fn mk_repeat(
    rep: char,
    subtree: crate::Subtree,
    separator: Option<crate::Separator>,
) -> Result<crate::Repeat, ParseError> {
    let kind = match rep {
        '*' => Some(crate::RepeatKind::ZeroOrMore),
        '+' => Some(crate::RepeatKind::OneOrMore),
        '?' => Some(crate::RepeatKind::ZeroOrOne),
        _ => None,
    };
    match kind {
        Some(kind) => Ok(crate::Repeat { subtree, kind, separator }),
        None => Err(ParseError::Expected(String::from("repeat"))),
    }
}
/// Parses a `$( ... )sep rep` repetition. The leading `$` has already been
/// consumed and the cursor is positioned at the parenthesized subtree.
fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> {
    let subtree = p.eat_subtree()?;
    let mut subtree = parse_subtree(subtree, transcriber)?;
    // The parentheses only delimit the repetition; they are not matched.
    subtree.delimiter = crate::Delimiter::None;
    // Fast path: `$( ... )*` / `+` / `?` with no separator.
    if let Some(rep) = p.at_punct() {
        match rep.char {
            '*' | '+' | '?' => {
                p.bump();
                return mk_repeat(rep.char, subtree, None);
            }
            _ => {}
        }
    }
    // Otherwise a separator must precede the repetition operator.
    // `ok_or_else` defers building the error String to the failure path
    // (the previous `ok_or(...)` allocated it unconditionally).
    let sep = p.eat_seperator().ok_or_else(|| ParseError::Expected(String::from("separator")))?;
    let rep = p.eat_punct().ok_or_else(|| ParseError::Expected(String::from("repeat")))?;
    mk_repeat(rep.char, subtree, Some(sep))
}
#[cfg(test)]
mod tests {
    use ra_syntax::{ast, AstNode};
    use super::*;
    use crate::ast_to_token_tree;
    #[test]
    fn test_invalid_parse() {
        // Not a subtree at all.
        expect_err("invalid", "subtree");
        is_valid("($i:ident) => ()");
        is_valid("($($i:ident)*) => ($_)");
        // `true`/`false` are valid macro-variable names.
        is_valid("($($true:ident)*) => ($true)");
        is_valid("($($false:ident)*) => ($false)");
        expect_err("$i:ident => ()", "subtree");
        expect_err("($i:ident) ()", "`=`");
        // `_` is neither a separator nor a repetition operator.
        expect_err("($($i:ident)_) => ()", "repeat");
    }
    // Asserts that parsing `macro_body` fails with `Expected(expected)`.
    fn expect_err(macro_body: &str, expected: &str) {
        assert_eq!(
            create_rules(&format_macro(macro_body)),
            Err(ParseError::Expected(String::from(expected)))
        );
    }
    // Asserts that `macro_body` parses successfully.
    fn is_valid(macro_body: &str) {
        assert!(create_rules(&format_macro(macro_body)).is_ok());
    }
    // Wraps a rule body in a `macro_rules! foo { ... }` shell.
    fn format_macro(macro_body: &str) -> String {
        format!(
            "
        macro_rules! foo {{
            {}
        }}
",
            macro_body
        )
    }
    // Parses the source text and runs the mbe parser over the macro's
    // token tree.
    fn create_rules(macro_definition: &str) -> Result<crate::MacroRules, ParseError> {
        let source_file = ast::SourceFile::parse(macro_definition);
        let macro_definition =
            source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
        let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
        parse(&definition_tt)
    }
}
| 32.385027 | 100 | 0.53253 |
d601a2c3ddf677a730eb8c8b206222c98bdb2af1 | 496 | table! {
allowed_states (state_id) {
state_id -> Integer,
state_type -> Text,
input -> Integer,
output -> Integer,
high -> Integer,
low -> Integer,
}
}
table! {
gpio_state (gpio_id) {
gpio_id -> Integer,
in_use -> Integer,
gpio_mode -> Nullable<Text>,
gpio_level -> Nullable<Text>,
last_change -> Nullable<Timestamp>,
}
}
allow_tables_to_appear_in_same_query!(allowed_states, gpio_state,);
| 21.565217 | 67 | 0.568548 |
9022a8821ebc9c63dab1260ede493374bc37c7c5 | 5,519 | use crate::{
self as http,
client_handle::SetClientHandle,
glue::{HyperServerSvc, UpgradeBody},
h2::Settings as H2Settings,
trace, upgrade, Version,
};
use linkerd_drain as drain;
use linkerd_error::Error;
use linkerd_io::{self as io, PeerAddr};
use linkerd_stack::{layer, NewService};
use std::{
future::Future,
pin::Pin,
task::{Context, Poll},
};
use tower::Service;
use tracing::debug;
// Hyper connection driver parameterised over the proxy's tracing-aware
// executor.
type Server = hyper::server::conn::Http<trace::Executor>;

/// Builds a `ServeHttp` per target; carries the shared hyper server
/// configuration and the drain (shutdown) watch handle.
#[derive(Clone, Debug)]
pub struct NewServeHttp<N> {
    inner: N,
    server: Server,
    drain: drain::Watch,
}

/// Serves a single accepted connection as HTTP/1 or HTTP/2, according to
/// the detected `version`.
#[derive(Clone, Debug)]
pub struct ServeHttp<S> {
    version: Version,
    server: Server,
    inner: S,
    drain: drain::Watch,
}
// === impl NewServeHttp ===
impl<N> NewServeHttp<N> {
    /// Returns a layer that wraps an inner `NewService` in `NewServeHttp`
    /// using the given HTTP/2 settings and drain handle.
    pub fn layer(
        h2: H2Settings,
        drain: drain::Watch,
    ) -> impl layer::Layer<N, Service = Self> + Clone {
        layer::mk(move |inner| Self::new(h2, inner, drain.clone()))
    }
    /// Creates a new `ServeHttp`.
    fn new(h2: H2Settings, inner: N, drain: drain::Watch) -> Self {
        let mut server = hyper::server::conn::Http::new().with_executor(trace::Executor::new());
        server
            .http2_initial_stream_window_size(h2.initial_stream_window_size)
            .http2_initial_connection_window_size(h2.initial_connection_window_size);
        // Configure HTTP/2 PING frames
        if let Some(timeout) = h2.keepalive_timeout {
            // XXX(eliza): is this a reasonable interval between
            // PING frames?
            let interval = timeout / 4;
            server
                .http2_keep_alive_timeout(timeout)
                .http2_keep_alive_interval(interval);
        }
        Self {
            inner,
            server,
            drain,
        }
    }
}
impl<T, N> NewService<(Version, T)> for NewServeHttp<N>
where
    N: NewService<(Version, T)> + Clone,
{
    type Service = ServeHttp<N::Service>;
    /// Builds the inner service for `target` and pairs it with the shared
    /// hyper server config, the detected protocol `version`, and the drain
    /// handle.
    fn new_service(&mut self, (version, target): (Version, T)) -> Self::Service {
        debug!(?version, "Creating HTTP service");
        let inner = self.inner.new_service((version, target));
        ServeHttp {
            inner,
            version,
            server: self.server.clone(),
            drain: self.drain.clone(),
        }
    }
}
// === impl ServeHttp ===
impl<I, S> Service<I> for ServeHttp<S>
where
    I: io::AsyncRead + io::AsyncWrite + PeerAddr + Send + Unpin + 'static,
    S: Service<http::Request<UpgradeBody>, Response = http::Response<http::BoxBody>, Error = Error>
        + Clone
        + Unpin
        + Send
        + 'static,
    S::Future: Send + 'static,
{
    type Response = ();
    type Error = Error;
    type Future = Pin<Box<dyn Future<Output = Result<(), Error>> + Send + 'static>>;
    fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Always ready: each `call` drives an independent connection.
        Poll::Ready(Ok(()))
    }
    /// Drives the accepted connection `io` to completion, shutting it down
    /// gracefully when the drain signal fires or when the stack drops the
    /// client handle (`closed`).
    fn call(&mut self, io: I) -> Self::Future {
        let Self {
            version,
            inner,
            drain,
            mut server,
        } = self.clone();
        debug!(?version, "Handling as HTTP");
        Box::pin(async move {
            // `closed` resolves when the stack releases the client handle.
            let (svc, closed) = SetClientHandle::new(io.peer_addr()?, inner.clone());
            match version {
                Version::Http1 => {
                    // Enable support for HTTP upgrades (CONNECT and websockets).
                    let mut conn = server
                        .http1_only(true)
                        .serve_connection(io, upgrade::Service::new(svc, drain.clone()))
                        .with_upgrades();
                    tokio::select! {
                        res = &mut conn => {
                            debug!(?res, "The client is shutting down the connection");
                            res?
                        }
                        shutdown = drain.signaled() => {
                            debug!("The process is shutting down the connection");
                            Pin::new(&mut conn).graceful_shutdown();
                            shutdown.release_after(conn).await?;
                        }
                        () = closed => {
                            debug!("The stack is tearing down the connection");
                            Pin::new(&mut conn).graceful_shutdown();
                            conn.await?;
                        }
                    }
                }
                Version::H2 => {
                    let mut conn = server
                        .http2_only(true)
                        .serve_connection(io, HyperServerSvc::new(svc));
                    tokio::select! {
                        res = &mut conn => {
                            debug!(?res, "The client is shutting down the connection");
                            res?
                        }
                        shutdown = drain.signaled() => {
                            debug!("The process is shutting down the connection");
                            Pin::new(&mut conn).graceful_shutdown();
                            shutdown.release_after(conn).await?;
                        }
                        () = closed => {
                            debug!("The stack is tearing down the connection");
                            Pin::new(&mut conn).graceful_shutdown();
                            conn.await?;
                        }
                    }
                }
            }
            Ok(())
        })
    }
}
| 32.087209 | 99 | 0.48487 |
6adee8290d7e08e1b54a67120f78bbab61e63c31 | 3,216 | use std::io::{Error, ErrorKind, Result, SeekFrom};
use crate::util::{ReadSeek, Shared, reader_read_val};
/// On-disk PFS0 (PartitionFS) archive header.
#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
#[repr(C)]
pub struct Header {
    pub magic: u32,
    pub file_count: u32,
    pub string_table_size: u32,
    pub reserved: [u8; 0x4]
}
impl Header {
    // Expected value of `Header::magic` ("PFS0", little-endian).
    pub const MAGIC: u32 = u32::from_le_bytes(*b"PFS0");
}
/// Per-file entry: data offset/size (relative to the data region) plus the
/// offset of the file's NUL-terminated name inside the string table.
/// NOTE(review): `size: usize` inside a `#[repr(C)]` on-disk layout is
/// platform-sized -- presumably this only ever runs on 64-bit targets;
/// confirm before porting.
#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
#[repr(C)]
pub struct FileEntry {
    pub offset: u64,
    pub size: usize,
    pub string_table_offset: u32,
    pub reserved: [u8; 0x4]
}
/// Reader over a PFS0 archive: the parsed header, entry table and raw
/// string table, plus the shared underlying stream.
pub struct PFS0 {
    reader: Shared<dyn ReadSeek>,
    header: Header,
    file_entries: Vec<FileEntry>,
    string_table: Vec<u8>
}
impl PFS0 {
pub fn new(reader: Shared<dyn ReadSeek>) -> Result<Self> {
let header: Header = reader_read_val(&reader)?;
if header.magic != Header::MAGIC {
return Err(Error::new(ErrorKind::InvalidInput, "Invalid PFS0 magic"));
}
let mut file_entries: Vec<FileEntry> = Vec::with_capacity(header.file_count as usize);
for _ in 0..header.file_count {
let file_entry: FileEntry = reader_read_val(&reader)?;
file_entries.push(file_entry);
}
let mut str_table = vec![0u8; header.string_table_size as usize];
reader.lock().unwrap().read_exact(&mut str_table)?;
Ok(Self {
reader: reader,
header: header,
file_entries: file_entries,
string_table: str_table
})
}
pub fn list_files(&self) -> Result<Vec<String>> {
let mut file_names: Vec<String> = Vec::with_capacity(self.file_entries.len());
for entry in self.file_entries.iter() {
let mut bytes: Vec<u8> = Vec::new();
let str_t = &self.string_table[entry.string_table_offset as usize..];
for i in 0..str_t.len() {
if str_t[i] == 0 {
break;
}
bytes.push(str_t[i]);
}
file_names.push(String::from_utf8(bytes).unwrap());
}
Ok(file_names)
}
pub fn get_file_size(&mut self, idx: usize) -> Result<usize> {
if idx >= self.file_entries.len() {
return Err(Error::new(ErrorKind::InvalidInput, "Invalid file index"));
}
Ok(self.file_entries[idx].size)
}
pub fn read_file(&mut self, idx: usize, offset: usize, buf: &mut [u8]) -> Result<usize> {
if idx >= self.file_entries.len() {
return Err(Error::new(ErrorKind::InvalidInput, "Invalid file index"));
}
let entry = &self.file_entries[idx];
if (offset + buf.len()) > entry.size {
return Err(Error::new(ErrorKind::UnexpectedEof, "EOF reached"));
}
let base_offset = std::mem::size_of::<Header>() + std::mem::size_of::<FileEntry>() * self.header.file_count as usize + self.header.string_table_size as usize;
let base_read_offset = base_offset + entry.offset as usize;
let read_offset = base_read_offset + offset;
self.reader.lock().unwrap().seek(SeekFrom::Start(read_offset as u64))?;
self.reader.lock().unwrap().read(buf)
}
} | 30.923077 | 166 | 0.594216 |
9b91d0675e34ca2ae075aab3df9762a420954ba5 | 15,273 | // automatically generated by the FlatBuffers compiler, do not modify
use std::cmp::Ordering;
use std::mem;
extern crate flatbuffers;
use self::flatbuffers::{EndianScalar, Follow};
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_FRAME_FORMAT: i8 = 0;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_FRAME_FORMAT: i8 = 24;
#[deprecated(
since = "2.0.0",
note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_FRAME_FORMAT: [FrameFormat; 3] =
[FrameFormat::Gray, FrameFormat::Bgr, FrameFormat::Bgra];
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct FrameFormat(pub i8);
#[allow(non_upper_case_globals)]
impl FrameFormat {
    // Variant constants; values are fixed by the FlatBuffers schema.
    pub const Gray: Self = Self(0);
    pub const Bgr: Self = Self(16);
    pub const Bgra: Self = Self(24);
    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 24;
    pub const ENUM_VALUES: &'static [Self] = &[Self::Gray, Self::Bgr, Self::Bgra];
    /// Returns the variant's name or "" if unknown.
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::Gray => Some("Gray"),
            Self::Bgr => Some("Bgr"),
            Self::Bgra => Some("Bgra"),
            _ => None,
        }
    }
}
impl std::fmt::Debug for FrameFormat {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
if let Some(name) = self.variant_name() {
f.write_str(name)
} else {
f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
}
}
}
impl<'a> flatbuffers::Follow<'a> for FrameFormat {
type Inner = Self;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
let b = unsafe { flatbuffers::read_scalar_at::<i8>(buf, loc) };
Self(b)
}
}
impl flatbuffers::Push for FrameFormat {
type Output = FrameFormat;
#[inline]
fn push(&self, dst: &mut [u8], _rest: &[u8]) {
unsafe {
flatbuffers::emplace_scalar::<i8>(dst, self.0);
}
}
}
impl flatbuffers::EndianScalar for FrameFormat {
#[inline]
fn to_little_endian(self) -> Self {
let b = i8::to_le(self.0);
Self(b)
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(self) -> Self {
let b = i8::from_le(self.0);
Self(b)
}
}
impl<'a> flatbuffers::Verifiable for FrameFormat {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier,
pos: usize,
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
i8::run_verifier(v, pos)
}
}
impl flatbuffers::SimpleToVerifyInSlice for FrameFormat {}
pub enum FrameOffset {}
#[derive(Copy, Clone, PartialEq)]
pub struct Frame<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for Frame<'a> {
type Inner = Frame<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf, loc },
}
}
}
impl<'a> Frame<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
Frame { _tab: table }
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args FrameArgs<'args>,
) -> flatbuffers::WIPOffset<Frame<'bldr>> {
let mut builder = FrameBuilder::new(_fbb);
builder.add_exposure_end_t(args.exposure_end_t);
builder.add_exposure_begin_t(args.exposure_begin_t);
builder.add_end_t(args.end_t);
builder.add_begin_t(args.begin_t);
builder.add_t(args.t);
if let Some(x) = args.pixels {
builder.add_pixels(x);
}
builder.add_offset_y(args.offset_y);
builder.add_offset_x(args.offset_x);
builder.add_height(args.height);
builder.add_width(args.width);
builder.add_format(args.format);
builder.finish()
}
pub const VT_T: flatbuffers::VOffsetT = 4;
pub const VT_BEGIN_T: flatbuffers::VOffsetT = 6;
pub const VT_END_T: flatbuffers::VOffsetT = 8;
pub const VT_EXPOSURE_BEGIN_T: flatbuffers::VOffsetT = 10;
pub const VT_EXPOSURE_END_T: flatbuffers::VOffsetT = 12;
pub const VT_FORMAT: flatbuffers::VOffsetT = 14;
pub const VT_WIDTH: flatbuffers::VOffsetT = 16;
pub const VT_HEIGHT: flatbuffers::VOffsetT = 18;
pub const VT_OFFSET_X: flatbuffers::VOffsetT = 20;
pub const VT_OFFSET_Y: flatbuffers::VOffsetT = 22;
pub const VT_PIXELS: flatbuffers::VOffsetT = 24;
#[inline]
pub fn t(&self) -> i64 {
self._tab.get::<i64>(Frame::VT_T, Some(0)).unwrap()
}
#[inline]
pub fn begin_t(&self) -> i64 {
self._tab.get::<i64>(Frame::VT_BEGIN_T, Some(0)).unwrap()
}
#[inline]
pub fn end_t(&self) -> i64 {
self._tab.get::<i64>(Frame::VT_END_T, Some(0)).unwrap()
}
#[inline]
pub fn exposure_begin_t(&self) -> i64 {
self._tab
.get::<i64>(Frame::VT_EXPOSURE_BEGIN_T, Some(0))
.unwrap()
}
#[inline]
pub fn exposure_end_t(&self) -> i64 {
self._tab
.get::<i64>(Frame::VT_EXPOSURE_END_T, Some(0))
.unwrap()
}
#[inline]
pub fn format(&self) -> FrameFormat {
self._tab
.get::<FrameFormat>(Frame::VT_FORMAT, Some(FrameFormat::Gray))
.unwrap()
}
#[inline]
pub fn width(&self) -> i16 {
self._tab.get::<i16>(Frame::VT_WIDTH, Some(0)).unwrap()
}
#[inline]
pub fn height(&self) -> i16 {
self._tab.get::<i16>(Frame::VT_HEIGHT, Some(0)).unwrap()
}
#[inline]
pub fn offset_x(&self) -> i16 {
self._tab.get::<i16>(Frame::VT_OFFSET_X, Some(0)).unwrap()
}
#[inline]
pub fn offset_y(&self) -> i16 {
self._tab.get::<i16>(Frame::VT_OFFSET_Y, Some(0)).unwrap()
}
#[inline]
pub fn pixels(&self) -> Option<&'a [u8]> {
self._tab
.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, u8>>>(
Frame::VT_PIXELS,
None,
)
.map(|v| v.safe_slice())
}
}
impl flatbuffers::Verifiable for Frame<'_> {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier,
pos: usize,
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
v.visit_table(pos)?
.visit_field::<i64>(&"t", Self::VT_T, false)?
.visit_field::<i64>(&"begin_t", Self::VT_BEGIN_T, false)?
.visit_field::<i64>(&"end_t", Self::VT_END_T, false)?
.visit_field::<i64>(&"exposure_begin_t", Self::VT_EXPOSURE_BEGIN_T, false)?
.visit_field::<i64>(&"exposure_end_t", Self::VT_EXPOSURE_END_T, false)?
.visit_field::<FrameFormat>(&"format", Self::VT_FORMAT, false)?
.visit_field::<i16>(&"width", Self::VT_WIDTH, false)?
.visit_field::<i16>(&"height", Self::VT_HEIGHT, false)?
.visit_field::<i16>(&"offset_x", Self::VT_OFFSET_X, false)?
.visit_field::<i16>(&"offset_y", Self::VT_OFFSET_Y, false)?
.visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, u8>>>(
&"pixels",
Self::VT_PIXELS,
false,
)?
.finish();
Ok(())
}
}
pub struct FrameArgs<'a> {
pub t: i64,
pub begin_t: i64,
pub end_t: i64,
pub exposure_begin_t: i64,
pub exposure_end_t: i64,
pub format: FrameFormat,
pub width: i16,
pub height: i16,
pub offset_x: i16,
pub offset_y: i16,
pub pixels: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, u8>>>,
}
impl<'a> Default for FrameArgs<'a> {
#[inline]
fn default() -> Self {
FrameArgs {
t: 0,
begin_t: 0,
end_t: 0,
exposure_begin_t: 0,
exposure_end_t: 0,
format: FrameFormat::Gray,
width: 0,
height: 0,
offset_x: 0,
offset_y: 0,
pixels: None,
}
}
}
pub struct FrameBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> FrameBuilder<'a, 'b> {
#[inline]
pub fn add_t(&mut self, t: i64) {
self.fbb_.push_slot::<i64>(Frame::VT_T, t, 0);
}
#[inline]
pub fn add_begin_t(&mut self, begin_t: i64) {
self.fbb_.push_slot::<i64>(Frame::VT_BEGIN_T, begin_t, 0);
}
#[inline]
pub fn add_end_t(&mut self, end_t: i64) {
self.fbb_.push_slot::<i64>(Frame::VT_END_T, end_t, 0);
}
#[inline]
pub fn add_exposure_begin_t(&mut self, exposure_begin_t: i64) {
self.fbb_
.push_slot::<i64>(Frame::VT_EXPOSURE_BEGIN_T, exposure_begin_t, 0);
}
#[inline]
pub fn add_exposure_end_t(&mut self, exposure_end_t: i64) {
self.fbb_
.push_slot::<i64>(Frame::VT_EXPOSURE_END_T, exposure_end_t, 0);
}
#[inline]
pub fn add_format(&mut self, format: FrameFormat) {
self.fbb_
.push_slot::<FrameFormat>(Frame::VT_FORMAT, format, FrameFormat::Gray);
}
#[inline]
pub fn add_width(&mut self, width: i16) {
self.fbb_.push_slot::<i16>(Frame::VT_WIDTH, width, 0);
}
#[inline]
pub fn add_height(&mut self, height: i16) {
self.fbb_.push_slot::<i16>(Frame::VT_HEIGHT, height, 0);
}
#[inline]
pub fn add_offset_x(&mut self, offset_x: i16) {
self.fbb_.push_slot::<i16>(Frame::VT_OFFSET_X, offset_x, 0);
}
#[inline]
pub fn add_offset_y(&mut self, offset_y: i16) {
self.fbb_.push_slot::<i16>(Frame::VT_OFFSET_Y, offset_y, 0);
}
#[inline]
pub fn add_pixels(&mut self, pixels: flatbuffers::WIPOffset<flatbuffers::Vector<'b, u8>>) {
self.fbb_
.push_slot_always::<flatbuffers::WIPOffset<_>>(Frame::VT_PIXELS, pixels);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> FrameBuilder<'a, 'b> {
let start = _fbb.start_table();
FrameBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<Frame<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
impl std::fmt::Debug for Frame<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut ds = f.debug_struct("Frame");
ds.field("t", &self.t());
ds.field("begin_t", &self.begin_t());
ds.field("end_t", &self.end_t());
ds.field("exposure_begin_t", &self.exposure_begin_t());
ds.field("exposure_end_t", &self.exposure_end_t());
ds.field("format", &self.format());
ds.field("width", &self.width());
ds.field("height", &self.height());
ds.field("offset_x", &self.offset_x());
ds.field("offset_y", &self.offset_y());
ds.field("pixels", &self.pixels());
ds.finish()
}
}
#[inline]
#[deprecated(since = "2.0.0", note = "Deprecated in favor of `root_as...` methods.")]
pub fn get_root_as_frame<'a>(buf: &'a [u8]) -> Frame<'a> {
unsafe { flatbuffers::root_unchecked::<Frame<'a>>(buf) }
}
#[inline]
#[deprecated(since = "2.0.0", note = "Deprecated in favor of `root_as...` methods.")]
pub fn get_size_prefixed_root_as_frame<'a>(buf: &'a [u8]) -> Frame<'a> {
unsafe { flatbuffers::size_prefixed_root_unchecked::<Frame<'a>>(buf) }
}
#[inline]
/// Verifies that a buffer of bytes contains a `Frame`
/// and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_frame_unchecked`.
pub fn root_as_frame(buf: &[u8]) -> Result<Frame, flatbuffers::InvalidFlatbuffer> {
flatbuffers::root::<Frame>(buf)
}
#[inline]
/// Verifies that a buffer of bytes contains a size prefixed
/// `Frame` and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `size_prefixed_root_as_frame_unchecked`.
pub fn size_prefixed_root_as_frame(buf: &[u8]) -> Result<Frame, flatbuffers::InvalidFlatbuffer> {
flatbuffers::size_prefixed_root::<Frame>(buf)
}
#[inline]
/// Verifies, with the given options, that a buffer of bytes
/// contains a `Frame` and returns it.
/// Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_frame_unchecked`.
pub fn root_as_frame_with_opts<'b, 'o>(
opts: &'o flatbuffers::VerifierOptions,
buf: &'b [u8],
) -> Result<Frame<'b>, flatbuffers::InvalidFlatbuffer> {
flatbuffers::root_with_opts::<Frame<'b>>(opts, buf)
}
#[inline]
/// Verifies, with the given verifier options, that a buffer of
/// bytes contains a size prefixed `Frame` and returns
/// it. Note that verification is still experimental and may not
/// catch every error, or be maximally performant. For the
/// previous, unchecked, behavior use
/// `root_as_frame_unchecked`.
pub fn size_prefixed_root_as_frame_with_opts<'b, 'o>(
opts: &'o flatbuffers::VerifierOptions,
buf: &'b [u8],
) -> Result<Frame<'b>, flatbuffers::InvalidFlatbuffer> {
flatbuffers::size_prefixed_root_with_opts::<Frame<'b>>(opts, buf)
}
#[inline]
/// Assumes, without verification, that a buffer of bytes contains a Frame and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid `Frame`.
pub unsafe fn root_as_frame_unchecked(buf: &[u8]) -> Frame {
flatbuffers::root_unchecked::<Frame>(buf)
}
#[inline]
/// Assumes, without verification, that a buffer of bytes contains a size prefixed Frame and returns it.
/// # Safety
/// Callers must trust the given bytes do indeed contain a valid size prefixed `Frame`.
pub unsafe fn size_prefixed_root_as_frame_unchecked(buf: &[u8]) -> Frame {
flatbuffers::size_prefixed_root_unchecked::<Frame>(buf)
}
/// File identifier embedded in finished `Frame` buffers.
pub const FRAME_IDENTIFIER: &str = "FRME";
#[inline]
/// Returns `true` if `buf` carries the (non-size-prefixed) `Frame`
/// file identifier.
pub fn frame_buffer_has_identifier(buf: &[u8]) -> bool {
    flatbuffers::buffer_has_identifier(buf, FRAME_IDENTIFIER, false)
}
#[inline]
/// Returns `true` if `buf` carries the size-prefixed `Frame`
/// file identifier.
pub fn frame_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool {
    flatbuffers::buffer_has_identifier(buf, FRAME_IDENTIFIER, true)
}
#[inline]
/// Finishes the builder with `root` as the buffer root, tagging the buffer
/// with the `Frame` identifier.
pub fn finish_frame_buffer<'a, 'b>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    root: flatbuffers::WIPOffset<Frame<'a>>,
) {
    fbb.finish(root, Some(FRAME_IDENTIFIER));
}
#[inline]
/// Like `finish_frame_buffer`, but writes a length prefix first.
pub fn finish_size_prefixed_frame_buffer<'a, 'b>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    root: flatbuffers::WIPOffset<Frame<'a>>,
) {
    fbb.finish_size_prefixed(root, Some(FRAME_IDENTIFIER));
}
| 33.347162 | 104 | 0.615072 |
fe02cc5de8cb8d459de28973fc3ffee67c52db2b | 91,885 | #![cfg_attr(bootstrap, feature(or_patterns))]
#![recursion_limit = "256"]
use rustc_ast as ast;
use rustc_ast::util::parser::{self, AssocOp, Fixity};
use rustc_ast_pretty::pp::Breaks::{Consistent, Inconsistent};
use rustc_ast_pretty::pp::{self, Breaks};
use rustc_ast_pretty::pprust::{Comments, PrintState};
use rustc_hir as hir;
use rustc_hir::{GenericArg, GenericParam, GenericParamKind, Node};
use rustc_hir::{GenericBound, PatKind, RangeEnd, TraitBoundModifier};
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::symbol::{kw, Ident, IdentPrinter, Symbol};
use rustc_span::{self, BytePos, FileName};
use rustc_target::spec::abi::Abi;
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::BTreeMap;
use std::vec;
/// Pretty-prints the HIR node identified by `hir_id` to a `String`.
///
/// Panics if `hir_id` is not present in `map`.
pub fn id_to_string(map: &dyn rustc_hir::intravisit::Map<'_>, hir_id: hir::HirId) -> String {
    to_string(&map, |s| s.print_node(map.find(hir_id).unwrap()))
}
pub enum AnnNode<'a> {
Name(&'a Symbol),
Block(&'a hir::Block<'a>),
Item(&'a hir::Item<'a>),
SubItem(hir::HirId),
Expr(&'a hir::Expr<'a>),
Pat(&'a hir::Pat<'a>),
Arm(&'a hir::Arm<'a>),
}
pub enum Nested {
Item(hir::ItemId),
TraitItem(hir::TraitItemId),
ImplItem(hir::ImplItemId),
ForeignItem(hir::ForeignItemId),
Body(hir::BodyId),
BodyParamPat(hir::BodyId, usize),
}
pub trait PpAnn {
fn nested(&self, _state: &mut State<'_>, _nested: Nested) {}
fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
}
pub struct NoAnn;
impl PpAnn for NoAnn {}
pub const NO_ANN: &dyn PpAnn = &NoAnn;
impl PpAnn for hir::Crate<'_> {
fn nested(&self, state: &mut State<'_>, nested: Nested) {
match nested {
Nested::Item(id) => state.print_item(self.item(id)),
Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)),
Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)),
Nested::ForeignItem(id) => state.print_foreign_item(self.foreign_item(id)),
Nested::Body(id) => state.print_expr(&self.body(id).value),
Nested::BodyParamPat(id, i) => state.print_pat(&self.body(id).params[i].pat),
}
}
}
/// Identical to the `PpAnn` implementation for `hir::Crate`,
/// except it avoids creating a dependency on the whole crate.
impl PpAnn for &dyn rustc_hir::intravisit::Map<'_> {
fn nested(&self, state: &mut State<'_>, nested: Nested) {
match nested {
Nested::Item(id) => state.print_item(self.item(id)),
Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)),
Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)),
Nested::ForeignItem(id) => state.print_foreign_item(self.foreign_item(id)),
Nested::Body(id) => state.print_expr(&self.body(id).value),
Nested::BodyParamPat(id, i) => state.print_pat(&self.body(id).params[i].pat),
}
}
}
pub struct State<'a> {
pub s: pp::Printer,
comments: Option<Comments<'a>>,
attrs: &'a BTreeMap<hir::HirId, &'a [ast::Attribute]>,
ann: &'a (dyn PpAnn + 'a),
}
impl<'a> State<'a> {
    /// Pretty-prints a single HIR `Node` of any kind.
    ///
    /// Panics on node kinds that cannot be printed in isolation (see the
    /// `panic!` arms below).
    pub fn print_node(&mut self, node: Node<'_>) {
        match node {
            Node::Param(a) => self.print_param(&a),
            Node::Item(a) => self.print_item(&a),
            Node::ForeignItem(a) => self.print_foreign_item(&a),
            Node::TraitItem(a) => self.print_trait_item(a),
            Node::ImplItem(a) => self.print_impl_item(a),
            Node::Variant(a) => self.print_variant(&a),
            Node::AnonConst(a) => self.print_anon_const(&a),
            Node::Expr(a) => self.print_expr(&a),
            Node::Stmt(a) => self.print_stmt(&a),
            Node::PathSegment(a) => self.print_path_segment(&a),
            Node::Ty(a) => self.print_type(&a),
            Node::TraitRef(a) => self.print_trait_ref(&a),
            Node::Binding(a) | Node::Pat(a) => self.print_pat(&a),
            Node::Arm(a) => self.print_arm(&a),
            Node::Block(a) => {
                // Containing cbox, will be closed by print-block at `}`.
                self.cbox(INDENT_UNIT);
                // Head-ibox, will be closed by print-block after `{`.
                self.ibox(0);
                self.print_block(&a)
            }
            Node::Lifetime(a) => self.print_lifetime(&a),
            Node::Visibility(a) => self.print_visibility(&a),
            Node::GenericParam(_) => panic!("cannot print Node::GenericParam"),
            Node::Field(_) => panic!("cannot print Node::Field"),
            // These cases do not carry enough information in the
            // `hir_map` to reconstruct their full structure for pretty
            // printing.
            Node::Ctor(..) => panic!("cannot print isolated Ctor"),
            Node::Local(a) => self.print_local_decl(&a),
            Node::MacroDef(_) => panic!("cannot print MacroDef"),
            Node::Crate(..) => panic!("cannot print Crate"),
        }
    }
}
// Delegate the pretty-printer primitives to the inner `pp::Printer`.
impl std::ops::Deref for State<'_> {
    type Target = pp::Printer;
    fn deref(&self) -> &Self::Target {
        &self.s
    }
}
impl std::ops::DerefMut for State<'_> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.s
    }
}
impl<'a> PrintState<'a> for State<'a> {
fn comments(&mut self) -> &mut Option<Comments<'a>> {
&mut self.comments
}
fn print_ident(&mut self, ident: Ident) {
self.s.word(IdentPrinter::for_ast_ident(ident, ident.is_raw_guess()).to_string());
self.ann.post(self, AnnNode::Name(&ident.name))
}
fn print_generic_args(&mut self, _: &ast::GenericArgs, _colons_before_params: bool) {
panic!("AST generic args printed by HIR pretty-printer");
}
}
pub const INDENT_UNIT: usize = 4;
/// Pretty-prints an entire HIR crate to a `String`.
///
/// Requires you to pass an input filename and reader so that
/// it can scan the input text for comments to copy forward.
pub fn print_crate<'a>(
    sm: &'a SourceMap,
    krate: &hir::Crate<'_>,
    filename: FileName,
    input: String,
    ann: &'a dyn PpAnn,
) -> String {
    let mut s = State::new_from_input(sm, filename, input, &krate.attrs, ann);
    // When printing the AST, we sometimes need to inject `#[no_std]` here.
    // Since you can't compile the HIR, it's not necessary.
    s.print_mod(&krate.item, s.attrs(hir::CRATE_HIR_ID));
    // Flush any comments trailing the last item, then finalize the buffer.
    s.print_remaining_comments();
    s.s.eof()
}
impl<'a> State<'a> {
pub fn new_from_input(
sm: &'a SourceMap,
filename: FileName,
input: String,
attrs: &'a BTreeMap<hir::HirId, &[ast::Attribute]>,
ann: &'a dyn PpAnn,
) -> State<'a> {
State {
s: pp::mk_printer(),
comments: Some(Comments::new(sm, filename, input)),
attrs,
ann,
}
}
fn attrs(&self, id: hir::HirId) -> &'a [ast::Attribute] {
self.attrs.get(&id).map_or(&[], |la| *la)
}
}
/// Runs `f` against a fresh printer (with no comment source attached) and
/// returns the rendered text.
pub fn to_string<F>(ann: &dyn PpAnn, f: F) -> String
where
    F: FnOnce(&mut State<'_>),
{
    let mut printer =
        State { s: pp::mk_printer(), comments: None, attrs: &BTreeMap::default(), ann };
    f(&mut printer);
    printer.s.eof()
}
pub fn visibility_qualified<S: Into<Cow<'static, str>>>(vis: &hir::Visibility<'_>, w: S) -> String {
to_string(NO_ANN, |s| {
s.print_visibility(vis);
s.s.word(w)
})
}
pub fn generic_params_to_string(generic_params: &[GenericParam<'_>]) -> String {
to_string(NO_ANN, |s| s.print_generic_params(generic_params))
}
pub fn bounds_to_string<'b>(bounds: impl IntoIterator<Item = &'b hir::GenericBound<'b>>) -> String {
to_string(NO_ANN, |s| s.print_bounds("", bounds))
}
pub fn ty_to_string(ty: &hir::Ty<'_>) -> String {
to_string(NO_ANN, |s| s.print_type(ty))
}
pub fn path_segment_to_string(segment: &hir::PathSegment<'_>) -> String {
to_string(NO_ANN, |s| s.print_path_segment(segment))
}
pub fn path_to_string(segment: &hir::Path<'_>) -> String {
to_string(NO_ANN, |s| s.print_path(segment, false))
}
pub fn fn_to_string(
decl: &hir::FnDecl<'_>,
header: hir::FnHeader,
name: Option<Symbol>,
generics: &hir::Generics<'_>,
vis: &hir::Visibility<'_>,
arg_names: &[Ident],
body_id: Option<hir::BodyId>,
) -> String {
to_string(NO_ANN, |s| s.print_fn(decl, header, name, generics, vis, arg_names, body_id))
}
pub fn enum_def_to_string(
enum_definition: &hir::EnumDef<'_>,
generics: &hir::Generics<'_>,
name: Symbol,
span: rustc_span::Span,
visibility: &hir::Visibility<'_>,
) -> String {
to_string(NO_ANN, |s| s.print_enum_def(enum_definition, generics, name, span, visibility))
}
impl<'a> State<'a> {
pub fn cbox(&mut self, u: usize) {
self.s.cbox(u);
}
pub fn nbsp(&mut self) {
self.s.word(" ")
}
pub fn word_nbsp<S: Into<Cow<'static, str>>>(&mut self, w: S) {
self.s.word(w);
self.nbsp()
}
pub fn head<S: Into<Cow<'static, str>>>(&mut self, w: S) {
let w = w.into();
// outer-box is consistent
self.cbox(INDENT_UNIT);
// head-box is inconsistent
self.ibox(w.len() + 1);
// keyword that starts the head
if !w.is_empty() {
self.word_nbsp(w);
}
}
pub fn bopen(&mut self) {
self.s.word("{");
self.end(); // close the head-box
}
pub fn bclose_maybe_open(&mut self, span: rustc_span::Span, close_box: bool) {
self.maybe_print_comment(span.hi());
self.break_offset_if_not_bol(1, -(INDENT_UNIT as isize));
self.s.word("}");
if close_box {
self.end(); // close the outer-box
}
}
pub fn bclose(&mut self, span: rustc_span::Span) {
self.bclose_maybe_open(span, true)
}
pub fn space_if_not_bol(&mut self) {
if !self.s.is_beginning_of_line() {
self.s.space();
}
}
    /// Emits a break of size `n` with indent offset `off`, unless already at
    /// the beginning of a line — in which case a nonzero offset is folded
    /// into the preceding hardbreak token instead.
    pub fn break_offset_if_not_bol(&mut self, n: usize, off: isize) {
        if !self.s.is_beginning_of_line() {
            self.s.break_offset(n, off)
        } else if off != 0 && self.s.last_token().is_hardbreak_tok() {
            // We do something pretty sketchy here: tuck the nonzero
            // offset-adjustment we were going to deposit along with the
            // break into the previous hardbreak.
            self.s.replace_last_token(pp::Printer::hardbreak_tok_offset(off));
        }
    }
    // Synthesizes a comment that was not textually present in the original
    // source file, rendered as `/* text */`.
    pub fn synth_comment(&mut self, text: String) {
        self.s.word("/*");
        self.s.space();
        self.s.word(text);
        self.s.space();
        self.s.word("*/")
    }
pub fn commasep_cmnt<T, F, G>(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G)
where
F: FnMut(&mut State<'_>, &T),
G: FnMut(&T) -> rustc_span::Span,
{
self.rbox(0, b);
let len = elts.len();
let mut i = 0;
for elt in elts {
self.maybe_print_comment(get_span(elt).hi());
op(self, elt);
i += 1;
if i < len {
self.s.word(",");
self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi()));
self.space_if_not_bol();
}
}
self.end();
}
    /// Prints a comma-separated list of expressions with comment interleaving.
    pub fn commasep_exprs(&mut self, b: Breaks, exprs: &[hir::Expr<'_>]) {
        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span)
    }
    /// Prints a module body: its inner attributes followed by each item.
    /// Items are printed via the annotation hook so nested bodies resolve.
    pub fn print_mod(&mut self, _mod: &hir::Mod<'_>, attrs: &[ast::Attribute]) {
        self.print_inner_attributes(attrs);
        for &item_id in _mod.item_ids {
            self.ann.nested(self, Nested::Item(item_id));
        }
    }
    /// Prints `lifetime` followed by a space, or nothing if it was elided.
    pub fn print_opt_lifetime(&mut self, lifetime: &hir::Lifetime) {
        if !lifetime.is_elided() {
            self.print_lifetime(lifetime);
            self.nbsp();
        }
    }
    /// Pretty-prints a type, wrapped in an inconsistent box, emitting any
    /// source comment that precedes it.
    pub fn print_type(&mut self, ty: &hir::Ty<'_>) {
        self.maybe_print_comment(ty.span.lo());
        self.ibox(0);
        match ty.kind {
            hir::TyKind::Slice(ref ty) => {
                self.s.word("[");
                self.print_type(&ty);
                self.s.word("]");
            }
            hir::TyKind::Ptr(ref mt) => {
                self.s.word("*");
                self.print_mt(mt, true);
            }
            hir::TyKind::Rptr(ref lifetime, ref mt) => {
                self.s.word("&");
                self.print_opt_lifetime(lifetime);
                self.print_mt(mt, false);
            }
            hir::TyKind::Never => {
                self.s.word("!");
            }
            hir::TyKind::Tup(ref elts) => {
                self.popen();
                self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(&ty));
                // A one-element tuple needs its trailing comma to stay a tuple.
                if elts.len() == 1 {
                    self.s.word(",");
                }
                self.pclose();
            }
            hir::TyKind::BareFn(ref f) => {
                self.print_ty_fn(
                    f.abi,
                    f.unsafety,
                    &f.decl,
                    None,
                    &f.generic_params,
                    f.param_names,
                );
            }
            // Opaque types have no surface syntax here; print a marker comment.
            hir::TyKind::OpaqueDef(..) => self.s.word("/*impl Trait*/"),
            hir::TyKind::Path(ref qpath) => self.print_qpath(qpath, false),
            hir::TyKind::TraitObject(bounds, ref lifetime, syntax) => {
                if syntax == ast::TraitObjectSyntax::Dyn {
                    self.word_space("dyn");
                }
                // Bounds are `+`-separated; the first bound gets no leading `+`.
                let mut first = true;
                for bound in bounds {
                    if first {
                        first = false;
                    } else {
                        self.nbsp();
                        self.word_space("+");
                    }
                    self.print_poly_trait_ref(bound);
                }
                if !lifetime.is_elided() {
                    self.nbsp();
                    self.word_space("+");
                    self.print_lifetime(lifetime);
                }
            }
            hir::TyKind::Array(ref ty, ref length) => {
                self.s.word("[");
                self.print_type(&ty);
                self.s.word("; ");
                self.print_anon_const(length);
                self.s.word("]");
            }
            hir::TyKind::Typeof(ref e) => {
                self.s.word("typeof(");
                self.print_anon_const(e);
                self.s.word(")");
            }
            hir::TyKind::Infer => {
                self.s.word("_");
            }
            hir::TyKind::Err => {
                self.popen();
                self.s.word("/*ERROR*/");
                self.pclose();
            }
        }
        self.end()
    }
    /// Pretty-prints an item inside an `extern { ... }` block: a foreign fn,
    /// static, or type declaration (all end with `;`, no bodies).
    pub fn print_foreign_item(&mut self, item: &hir::ForeignItem<'_>) {
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(item.span.lo());
        self.print_outer_attributes(self.attrs(item.hir_id()));
        match item.kind {
            hir::ForeignItemKind::Fn(ref decl, ref arg_names, ref generics) => {
                self.head("");
                // Foreign fns carry no header of their own; print a default one.
                self.print_fn(
                    decl,
                    hir::FnHeader {
                        unsafety: hir::Unsafety::Normal,
                        constness: hir::Constness::NotConst,
                        abi: Abi::Rust,
                        asyncness: hir::IsAsync::NotAsync,
                    },
                    Some(item.ident.name),
                    generics,
                    &item.vis,
                    arg_names,
                    None,
                );
                self.end(); // end head-ibox
                self.s.word(";");
                self.end() // end the outer fn box
            }
            hir::ForeignItemKind::Static(ref t, m) => {
                self.head(visibility_qualified(&item.vis, "static"));
                if m == hir::Mutability::Mut {
                    self.word_space("mut");
                }
                self.print_ident(item.ident);
                self.word_space(":");
                self.print_type(&t);
                self.s.word(";");
                self.end(); // end the head-ibox
                self.end() // end the outer cbox
            }
            hir::ForeignItemKind::Type => {
                self.head(visibility_qualified(&item.vis, "type"));
                self.print_ident(item.ident);
                self.s.word(";");
                self.end(); // end the head-ibox
                self.end() // end the outer cbox
            }
        }
    }
    /// Prints an associated `const` item: `vis const ident: ty [= body];`.
    /// `default` is the optional initializer body.
    fn print_associated_const(
        &mut self,
        ident: Ident,
        ty: &hir::Ty<'_>,
        default: Option<hir::BodyId>,
        vis: &hir::Visibility<'_>,
    ) {
        self.s.word(visibility_qualified(vis, ""));
        self.word_space("const");
        self.print_ident(ident);
        self.word_space(":");
        self.print_type(ty);
        if let Some(expr) = default {
            self.s.space();
            self.word_space("=");
            self.ann.nested(self, Nested::Body(expr));
        }
        self.s.word(";")
    }
    /// Prints an associated `type` item: `type ident<params>[: bounds]
    /// where ... [= ty];`. Both bounds and the assigned type are optional.
    fn print_associated_type(
        &mut self,
        ident: Ident,
        generics: &hir::Generics<'_>,
        bounds: Option<hir::GenericBounds<'_>>,
        ty: Option<&hir::Ty<'_>>,
    ) {
        self.word_space("type");
        self.print_ident(ident);
        self.print_generic_params(&generics.params);
        if let Some(bounds) = bounds {
            self.print_bounds(":", bounds);
        }
        self.print_where_clause(&generics.where_clause);
        if let Some(ty) = ty {
            self.s.space();
            self.word_space("=");
            self.print_type(ty);
        }
        self.s.word(";")
    }
    /// Shared skeleton for type-alias-like items (`type` / opaque type):
    /// prints `vis type ident<params> where ...`, then lets `inner` print
    /// the right-hand side before the terminating `;`.
    fn print_item_type(
        &mut self,
        item: &hir::Item<'_>,
        generics: &hir::Generics<'_>,
        inner: impl Fn(&mut Self),
    ) {
        self.head(visibility_qualified(&item.vis, "type"));
        self.print_ident(item.ident);
        self.print_generic_params(&generics.params);
        self.end(); // end the inner ibox
        self.print_where_clause(&generics.where_clause);
        self.s.space();
        inner(self);
        self.s.word(";");
        self.end(); // end the outer ibox
    }
    /// Pretty-print an item.
    ///
    /// Dispatches on `item.kind` and reproduces the item's surface syntax.
    /// Each arm is responsible for balancing the boxes opened by `head`
    /// (directly or via `bopen`/`bclose`). Nested bodies and sub-items are
    /// printed through the `ann` hook.
    pub fn print_item(&mut self, item: &hir::Item<'_>) {
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(item.span.lo());
        let attrs = self.attrs(item.hir_id());
        self.print_outer_attributes(attrs);
        self.ann.pre(self, AnnNode::Item(item));
        match item.kind {
            hir::ItemKind::ExternCrate(orig_name) => {
                self.head(visibility_qualified(&item.vis, "extern crate"));
                // `extern crate orig as ident;` when the crate was renamed.
                if let Some(orig_name) = orig_name {
                    self.print_name(orig_name);
                    self.s.space();
                    self.s.word("as");
                    self.s.space();
                }
                self.print_ident(item.ident);
                self.s.word(";");
                self.end(); // end inner head-block
                self.end(); // end outer head-block
            }
            hir::ItemKind::Use(ref path, kind) => {
                self.head(visibility_qualified(&item.vis, "use"));
                self.print_path(path, false);
                match kind {
                    hir::UseKind::Single => {
                        // Print `as ident` only when the binding was renamed.
                        if path.segments.last().unwrap().ident != item.ident {
                            self.s.space();
                            self.word_space("as");
                            self.print_ident(item.ident);
                        }
                        self.s.word(";");
                    }
                    hir::UseKind::Glob => self.s.word("::*;"),
                    hir::UseKind::ListStem => self.s.word("::{};"),
                }
                self.end(); // end inner head-block
                self.end(); // end outer head-block
            }
            hir::ItemKind::Static(ref ty, m, expr) => {
                self.head(visibility_qualified(&item.vis, "static"));
                if m == hir::Mutability::Mut {
                    self.word_space("mut");
                }
                self.print_ident(item.ident);
                self.word_space(":");
                self.print_type(&ty);
                self.s.space();
                self.end(); // end the head-ibox
                self.word_space("=");
                self.ann.nested(self, Nested::Body(expr));
                self.s.word(";");
                self.end(); // end the outer cbox
            }
            hir::ItemKind::Const(ref ty, expr) => {
                self.head(visibility_qualified(&item.vis, "const"));
                self.print_ident(item.ident);
                self.word_space(":");
                self.print_type(&ty);
                self.s.space();
                self.end(); // end the head-ibox
                self.word_space("=");
                self.ann.nested(self, Nested::Body(expr));
                self.s.word(";");
                self.end(); // end the outer cbox
            }
            hir::ItemKind::Fn(ref sig, ref param_names, body) => {
                self.head("");
                self.print_fn(
                    &sig.decl,
                    sig.header,
                    Some(item.ident.name),
                    param_names,
                    &item.vis,
                    &[],
                    Some(body),
                );
                self.s.word(" ");
                self.end(); // need to close a box
                self.end(); // need to close a box
                self.ann.nested(self, Nested::Body(body));
            }
            hir::ItemKind::Mod(ref _mod) => {
                self.head(visibility_qualified(&item.vis, "mod"));
                self.print_ident(item.ident);
                self.nbsp();
                self.bopen();
                self.print_mod(_mod, attrs);
                self.bclose(item.span);
            }
            hir::ItemKind::ForeignMod { abi, items } => {
                self.head("extern");
                self.word_nbsp(abi.to_string());
                self.bopen();
                self.print_inner_attributes(self.attrs(item.hir_id()));
                for item in items {
                    self.ann.nested(self, Nested::ForeignItem(item.id));
                }
                self.bclose(item.span);
            }
            hir::ItemKind::GlobalAsm(ref asm) => {
                self.head(visibility_qualified(&item.vis, "global_asm!"));
                self.print_inline_asm(asm);
                self.end()
            }
            hir::ItemKind::TyAlias(ref ty, ref generics) => {
                self.print_item_type(item, &generics, |state| {
                    state.word_space("=");
                    state.print_type(&ty);
                });
            }
            hir::ItemKind::OpaqueTy(ref opaque_ty) => {
                self.print_item_type(item, &opaque_ty.generics, |state| {
                    // Split out `?Trait` bounds, which print with `for ?`
                    // before the remaining (real) bounds.
                    let mut real_bounds = Vec::with_capacity(opaque_ty.bounds.len());
                    for b in opaque_ty.bounds.iter() {
                        if let GenericBound::Trait(ref ptr, hir::TraitBoundModifier::Maybe) = *b {
                            state.s.space();
                            state.word_space("for ?");
                            state.print_trait_ref(&ptr.trait_ref);
                        } else {
                            real_bounds.push(b);
                        }
                    }
                    state.print_bounds("= impl", real_bounds);
                });
            }
            hir::ItemKind::Enum(ref enum_definition, ref params) => {
                self.print_enum_def(enum_definition, params, item.ident.name, item.span, &item.vis);
            }
            hir::ItemKind::Struct(ref struct_def, ref generics) => {
                self.head(visibility_qualified(&item.vis, "struct"));
                self.print_struct(struct_def, generics, item.ident.name, item.span, true);
            }
            hir::ItemKind::Union(ref struct_def, ref generics) => {
                self.head(visibility_qualified(&item.vis, "union"));
                self.print_struct(struct_def, generics, item.ident.name, item.span, true);
            }
            hir::ItemKind::Impl(hir::Impl {
                unsafety,
                polarity,
                defaultness,
                constness,
                defaultness_span: _,
                ref generics,
                ref of_trait,
                ref self_ty,
                items,
            }) => {
                self.head("");
                self.print_visibility(&item.vis);
                self.print_defaultness(defaultness);
                self.print_unsafety(unsafety);
                self.word_nbsp("impl");
                if !generics.params.is_empty() {
                    self.print_generic_params(&generics.params);
                    self.s.space();
                }
                if constness == hir::Constness::Const {
                    self.word_nbsp("const");
                }
                // `!` marks a negative impl (`impl !Trait for Ty`).
                if let hir::ImplPolarity::Negative(_) = polarity {
                    self.s.word("!");
                }
                if let Some(ref t) = of_trait {
                    self.print_trait_ref(t);
                    self.s.space();
                    self.word_space("for");
                }
                self.print_type(&self_ty);
                self.print_where_clause(&generics.where_clause);
                self.s.space();
                self.bopen();
                self.print_inner_attributes(attrs);
                for impl_item in items {
                    self.ann.nested(self, Nested::ImplItem(impl_item.id));
                }
                self.bclose(item.span);
            }
            hir::ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, trait_items) => {
                self.head("");
                self.print_visibility(&item.vis);
                self.print_is_auto(is_auto);
                self.print_unsafety(unsafety);
                self.word_nbsp("trait");
                self.print_ident(item.ident);
                self.print_generic_params(&generics.params);
                // Split out `?Trait` bounds, printed with `for ?` (see above).
                let mut real_bounds = Vec::with_capacity(bounds.len());
                for b in bounds.iter() {
                    if let GenericBound::Trait(ref ptr, hir::TraitBoundModifier::Maybe) = *b {
                        self.s.space();
                        self.word_space("for ?");
                        self.print_trait_ref(&ptr.trait_ref);
                    } else {
                        real_bounds.push(b);
                    }
                }
                self.print_bounds(":", real_bounds);
                self.print_where_clause(&generics.where_clause);
                self.s.word(" ");
                self.bopen();
                for trait_item in trait_items {
                    self.ann.nested(self, Nested::TraitItem(trait_item.id));
                }
                self.bclose(item.span);
            }
            hir::ItemKind::TraitAlias(ref generics, ref bounds) => {
                self.head("");
                self.print_visibility(&item.vis);
                self.word_nbsp("trait");
                self.print_ident(item.ident);
                self.print_generic_params(&generics.params);
                let mut real_bounds = Vec::with_capacity(bounds.len());
                // FIXME(durka) this seems to be some quite outdated syntax
                for b in bounds.iter() {
                    if let GenericBound::Trait(ref ptr, hir::TraitBoundModifier::Maybe) = *b {
                        self.s.space();
                        self.word_space("for ?");
                        self.print_trait_ref(&ptr.trait_ref);
                    } else {
                        real_bounds.push(b);
                    }
                }
                self.nbsp();
                self.print_bounds("=", real_bounds);
                self.print_where_clause(&generics.where_clause);
                self.s.word(";");
            }
        }
        self.ann.post(self, AnnNode::Item(item))
    }
    /// Prints a trait reference as its path (no turbofish).
    pub fn print_trait_ref(&mut self, t: &hir::TraitRef<'_>) {
        self.print_path(&t.path, false)
    }
    /// Prints a higher-ranked binder `for<...> ` if there are any params.
    fn print_formal_generic_params(&mut self, generic_params: &[hir::GenericParam<'_>]) {
        if !generic_params.is_empty() {
            self.s.word("for");
            self.print_generic_params(generic_params);
            self.nbsp();
        }
    }
    /// Prints a (possibly higher-ranked) trait ref: `for<...> Trait`.
    fn print_poly_trait_ref(&mut self, t: &hir::PolyTraitRef<'_>) {
        self.print_formal_generic_params(&t.bound_generic_params);
        self.print_trait_ref(&t.trait_ref)
    }
    /// Prints an `enum` definition header (`vis enum Name<params> where ...`)
    /// followed by its braced variant list.
    pub fn print_enum_def(
        &mut self,
        enum_definition: &hir::EnumDef<'_>,
        generics: &hir::Generics<'_>,
        name: Symbol,
        span: rustc_span::Span,
        visibility: &hir::Visibility<'_>,
    ) {
        self.head(visibility_qualified(visibility, "enum"));
        self.print_name(name);
        self.print_generic_params(&generics.params);
        self.print_where_clause(&generics.where_clause);
        self.s.space();
        self.print_variants(&enum_definition.variants, span)
    }
    /// Prints the braced, comma-terminated list of enum variants, keeping
    /// each variant's attributes and surrounding comments.
    pub fn print_variants(&mut self, variants: &[hir::Variant<'_>], span: rustc_span::Span) {
        self.bopen();
        for v in variants {
            self.space_if_not_bol();
            self.maybe_print_comment(v.span.lo());
            self.print_outer_attributes(self.attrs(v.id));
            self.ibox(INDENT_UNIT);
            self.print_variant(v);
            self.s.word(",");
            self.end();
            self.maybe_print_trailing_comment(v.span, None);
        }
        self.bclose(span)
    }
    /// Prints a visibility qualifier (`pub`, `crate`, `pub(crate)`,
    /// `pub(super)`, `pub(in path)`) or nothing for inherited visibility.
    pub fn print_visibility(&mut self, vis: &hir::Visibility<'_>) {
        match vis.node {
            hir::VisibilityKind::Public => self.word_nbsp("pub"),
            hir::VisibilityKind::Crate(ast::CrateSugar::JustCrate) => self.word_nbsp("crate"),
            hir::VisibilityKind::Crate(ast::CrateSugar::PubCrate) => self.word_nbsp("pub(crate)"),
            hir::VisibilityKind::Restricted { ref path, .. } => {
                self.s.word("pub(");
                if path.segments.len() == 1 && path.segments[0].ident.name == kw::Super {
                    // Special case: `super` can print like `pub(super)`.
                    self.s.word("super");
                } else {
                    // Everything else requires `in` at present.
                    self.word_nbsp("in");
                    self.print_path(path, false);
                }
                self.word_nbsp(")");
            }
            hir::VisibilityKind::Inherited => (),
        }
    }
    /// Prints `default ` for default(-overridable) items; nothing for final.
    pub fn print_defaultness(&mut self, defaultness: hir::Defaultness) {
        match defaultness {
            hir::Defaultness::Default { .. } => self.word_nbsp("default"),
            hir::Defaultness::Final => (),
        }
    }
    /// Prints a struct/union/variant body after its introducing keyword:
    /// tuple form `Name(tys);`, unit form `Name;`, or record form
    /// `Name { fields }`. `print_finalizer` controls the trailing `;`
    /// (wanted for items, not for enum variants).
    pub fn print_struct(
        &mut self,
        struct_def: &hir::VariantData<'_>,
        generics: &hir::Generics<'_>,
        name: Symbol,
        span: rustc_span::Span,
        print_finalizer: bool,
    ) {
        self.print_name(name);
        self.print_generic_params(&generics.params);
        match struct_def {
            hir::VariantData::Tuple(..) | hir::VariantData::Unit(..) => {
                // Unit structs skip the parenthesized field list entirely.
                if let hir::VariantData::Tuple(..) = struct_def {
                    self.popen();
                    self.commasep(Inconsistent, struct_def.fields(), |s, field| {
                        s.maybe_print_comment(field.span.lo());
                        s.print_outer_attributes(s.attrs(field.hir_id));
                        s.print_visibility(&field.vis);
                        s.print_type(&field.ty)
                    });
                    self.pclose();
                }
                self.print_where_clause(&generics.where_clause);
                if print_finalizer {
                    self.s.word(";");
                }
                self.end();
                self.end() // close the outer-box
            }
            hir::VariantData::Struct(..) => {
                self.print_where_clause(&generics.where_clause);
                self.nbsp();
                self.bopen();
                self.hardbreak_if_not_bol();
                for field in struct_def.fields() {
                    self.hardbreak_if_not_bol();
                    self.maybe_print_comment(field.span.lo());
                    self.print_outer_attributes(self.attrs(field.hir_id));
                    self.print_visibility(&field.vis);
                    self.print_ident(field.ident);
                    self.word_nbsp(":");
                    self.print_type(&field.ty);
                    self.s.word(",");
                }
                self.bclose(span)
            }
        }
    }
    /// Prints one enum variant, including an explicit discriminant
    /// (`= expr`) if present. Uses empty generics since variants have none.
    pub fn print_variant(&mut self, v: &hir::Variant<'_>) {
        self.head("");
        let generics = hir::Generics::empty();
        self.print_struct(&v.data, &generics, v.ident.name, v.span, false);
        if let Some(ref d) = v.disr_expr {
            self.s.space();
            self.word_space("=");
            self.print_anon_const(d);
        }
    }
    /// Prints a method signature (a named fn with the method's header,
    /// generics, and visibility; `body_id` links an optional body).
    pub fn print_method_sig(
        &mut self,
        ident: Ident,
        m: &hir::FnSig<'_>,
        generics: &hir::Generics<'_>,
        vis: &hir::Visibility<'_>,
        arg_names: &[Ident],
        body_id: Option<hir::BodyId>,
    ) {
        self.print_fn(&m.decl, m.header, Some(ident.name), generics, vis, arg_names, body_id)
    }
    /// Pretty-prints a trait item: an associated const, a required or
    /// provided method, or an associated type. Trait items always have
    /// inherited visibility, so a dummy `Inherited` vis is synthesized.
    pub fn print_trait_item(&mut self, ti: &hir::TraitItem<'_>) {
        self.ann.pre(self, AnnNode::SubItem(ti.hir_id()));
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(ti.span.lo());
        self.print_outer_attributes(self.attrs(ti.hir_id()));
        match ti.kind {
            hir::TraitItemKind::Const(ref ty, default) => {
                let vis =
                    Spanned { span: rustc_span::DUMMY_SP, node: hir::VisibilityKind::Inherited };
                self.print_associated_const(ti.ident, &ty, default, &vis);
            }
            hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(ref arg_names)) => {
                // Required method: signature only, terminated with `;`.
                let vis =
                    Spanned { span: rustc_span::DUMMY_SP, node: hir::VisibilityKind::Inherited };
                self.print_method_sig(ti.ident, sig, &ti.generics, &vis, arg_names, None);
                self.s.word(";");
            }
            hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => {
                // Provided method: signature followed by the default body.
                let vis =
                    Spanned { span: rustc_span::DUMMY_SP, node: hir::VisibilityKind::Inherited };
                self.head("");
                self.print_method_sig(ti.ident, sig, &ti.generics, &vis, &[], Some(body));
                self.nbsp();
                self.end(); // need to close a box
                self.end(); // need to close a box
                self.ann.nested(self, Nested::Body(body));
            }
            hir::TraitItemKind::Type(ref bounds, ref default) => {
                self.print_associated_type(
                    ti.ident,
                    &ti.generics,
                    Some(bounds),
                    default.as_ref().map(|ty| &**ty),
                );
            }
        }
        self.ann.post(self, AnnNode::SubItem(ti.hir_id()))
    }
    /// Pretty-prints an impl item: an associated const, a method with body,
    /// or a type alias, preceded by its attributes and defaultness.
    pub fn print_impl_item(&mut self, ii: &hir::ImplItem<'_>) {
        self.ann.pre(self, AnnNode::SubItem(ii.hir_id()));
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(ii.span.lo());
        self.print_outer_attributes(self.attrs(ii.hir_id()));
        self.print_defaultness(ii.defaultness);
        match ii.kind {
            hir::ImplItemKind::Const(ref ty, expr) => {
                self.print_associated_const(ii.ident, &ty, Some(expr), &ii.vis);
            }
            hir::ImplItemKind::Fn(ref sig, body) => {
                self.head("");
                self.print_method_sig(ii.ident, sig, &ii.generics, &ii.vis, &[], Some(body));
                self.nbsp();
                self.end(); // need to close a box
                self.end(); // need to close a box
                self.ann.nested(self, Nested::Body(body));
            }
            hir::ImplItemKind::TyAlias(ref ty) => {
                self.print_associated_type(ii.ident, &ii.generics, None, Some(ty));
            }
        }
        self.ann.post(self, AnnNode::SubItem(ii.hir_id()))
    }
    /// Prints a `let` binding: `let <decl> [= init]`. The pattern/type part
    /// is delegated to the `decl` closure so callers can customize it.
    pub fn print_local(&mut self, init: Option<&hir::Expr<'_>>, decl: impl Fn(&mut Self)) {
        self.space_if_not_bol();
        self.ibox(INDENT_UNIT);
        self.word_nbsp("let");
        self.ibox(INDENT_UNIT);
        decl(self);
        self.end();
        if let Some(ref init) = init {
            self.nbsp();
            self.word_space("=");
            self.print_expr(&init);
        }
        self.end()
    }
    /// Pretty-prints a statement, appending a `;` where the statement kind
    /// requires one (per `stmt_ends_with_semi`).
    // NOTE(review): `stmt_ends_with_semi` is defined outside this view; for
    // `StmtKind::Semi` (which already prints its own `;` below) it must
    // return false to avoid a doubled semicolon — confirm against its impl.
    pub fn print_stmt(&mut self, st: &hir::Stmt<'_>) {
        self.maybe_print_comment(st.span.lo());
        match st.kind {
            hir::StmtKind::Local(ref loc) => {
                self.print_local(loc.init.as_deref(), |this| this.print_local_decl(&loc));
            }
            hir::StmtKind::Item(item) => self.ann.nested(self, Nested::Item(item)),
            hir::StmtKind::Expr(ref expr) => {
                self.space_if_not_bol();
                self.print_expr(&expr);
            }
            hir::StmtKind::Semi(ref expr) => {
                self.space_if_not_bol();
                self.print_expr(&expr);
                self.s.word(";");
            }
        }
        if stmt_ends_with_semi(&st.kind) {
            self.s.word(";");
        }
        self.maybe_print_trailing_comment(st.span, None)
    }
    /// Prints a block with no attributes, closing its outer box.
    pub fn print_block(&mut self, blk: &hir::Block<'_>) {
        self.print_block_with_attrs(blk, &[])
    }
    /// Prints a block but leaves the outer box open for the caller to close.
    pub fn print_block_unclosed(&mut self, blk: &hir::Block<'_>) {
        self.print_block_maybe_unclosed(blk, &[], false)
    }
    /// Prints a block with the given inner attributes, closing its outer box.
    pub fn print_block_with_attrs(&mut self, blk: &hir::Block<'_>, attrs: &[ast::Attribute]) {
        self.print_block_maybe_unclosed(blk, attrs, true)
    }
    /// Core block printer: optional safety qualifier, `{`, inner attributes,
    /// statements, optional tail expression, `}`. `close_box` controls
    /// whether the outer box is closed along with the brace.
    pub fn print_block_maybe_unclosed(
        &mut self,
        blk: &hir::Block<'_>,
        attrs: &[ast::Attribute],
        close_box: bool,
    ) {
        match blk.rules {
            hir::BlockCheckMode::UnsafeBlock(..) => self.word_space("unsafe"),
            hir::BlockCheckMode::PushUnsafeBlock(..) => self.word_space("push_unsafe"),
            hir::BlockCheckMode::PopUnsafeBlock(..) => self.word_space("pop_unsafe"),
            hir::BlockCheckMode::DefaultBlock => (),
        }
        self.maybe_print_comment(blk.span.lo());
        self.ann.pre(self, AnnNode::Block(blk));
        self.bopen();
        self.print_inner_attributes(attrs);
        for st in blk.stmts {
            self.print_stmt(st);
        }
        // The tail expression (block value), if any, with its trailing comment.
        if let Some(ref expr) = blk.expr {
            self.space_if_not_bol();
            self.print_expr(&expr);
            self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()));
        }
        self.bclose_maybe_open(blk.span, close_box);
        self.ann.post(self, AnnNode::Block(blk))
    }
    /// Prints the `else` part of an `if`, if any: another `else if`, a final
    /// `else { ... }`, or (for desugared `else if let`) an `else { match }`.
    /// Any other expression kind here is a printer invariant violation.
    fn print_else(&mut self, els: Option<&hir::Expr<'_>>) {
        match els {
            Some(else_) => {
                match else_.kind {
                    // "another else-if"
                    hir::ExprKind::If(ref i, ref then, ref e) => {
                        self.cbox(INDENT_UNIT - 1);
                        self.ibox(0);
                        self.s.word(" else if ");
                        self.print_expr_as_cond(&i);
                        self.s.space();
                        self.print_expr(&then);
                        self.print_else(e.as_ref().map(|e| &**e))
                    }
                    // "final else"
                    hir::ExprKind::Block(ref b, _) => {
                        self.cbox(INDENT_UNIT - 1);
                        self.ibox(0);
                        self.s.word(" else ");
                        self.print_block(&b)
                    }
                    hir::ExprKind::Match(ref expr, arms, _) => {
                        // else if let desugared to match
                        assert!(arms.len() == 2, "if let desugars to match with two arms");
                        self.s.word(" else ");
                        self.s.word("{");
                        self.cbox(INDENT_UNIT);
                        self.ibox(INDENT_UNIT);
                        self.word_nbsp("match");
                        self.print_expr_as_cond(&expr);
                        self.s.space();
                        self.bopen();
                        for arm in arms {
                            self.print_arm(arm);
                        }
                        self.bclose(expr.span);
                        self.s.word("}");
                    }
                    // BLEAH, constraints would be great here
                    _ => {
                        panic!("print_if saw if with weird alternative");
                    }
                }
            }
            _ => {}
        }
    }
    /// Prints `if test <blk>` followed by the else-chain, if any.
    pub fn print_if(
        &mut self,
        test: &hir::Expr<'_>,
        blk: &hir::Expr<'_>,
        elseopt: Option<&hir::Expr<'_>>,
    ) {
        self.head("if");
        self.print_expr_as_cond(test);
        self.s.space();
        self.print_expr(blk);
        self.print_else(elseopt)
    }
    /// Prints an anonymous constant by printing its body via the ann hook.
    pub fn print_anon_const(&mut self, constant: &hir::AnonConst) {
        self.ann.nested(self, Nested::Body(constant.body))
    }
    /// Prints a parenthesized, comma-separated argument list: `(a, b, c)`.
    fn print_call_post(&mut self, args: &[hir::Expr<'_>]) {
        self.popen();
        self.commasep_exprs(Inconsistent, args);
        self.pclose()
    }
pub fn print_expr_maybe_paren(&mut self, expr: &hir::Expr<'_>, prec: i8) {
let needs_par = expr.precedence().order() < prec;
if needs_par {
self.popen();
}
self.print_expr(expr);
if needs_par {
self.pclose();
}
}
/// Print an expr using syntax that's acceptable in a condition position, such as the `cond` in
/// `if cond { ... }`.
pub fn print_expr_as_cond(&mut self, expr: &hir::Expr<'_>) {
let needs_par = match expr.kind {
// These cases need parens due to the parse error observed in #26461: `if return {}`
// parses as the erroneous construct `if (return {})`, not `if (return) {}`.
hir::ExprKind::Closure(..) | hir::ExprKind::Ret(..) | hir::ExprKind::Break(..) => true,
_ => contains_exterior_struct_lit(expr),
};
if needs_par {
self.popen();
}
self.print_expr(expr);
if needs_par {
self.pclose();
}
}
    /// Prints an array literal: `[a, b, c]`.
    fn print_expr_vec(&mut self, exprs: &[hir::Expr<'_>]) {
        self.ibox(INDENT_UNIT);
        self.s.word("[");
        self.commasep_exprs(Inconsistent, exprs);
        self.s.word("]");
        self.end()
    }
    /// Prints an inline const block expression: `const { ... }`.
    fn print_expr_anon_const(&mut self, anon_const: &hir::AnonConst) {
        self.ibox(INDENT_UNIT);
        self.s.word_space("const");
        self.print_anon_const(anon_const);
        self.end()
    }
    /// Prints an array-repeat expression: `[element; count]`.
    fn print_expr_repeat(&mut self, element: &hir::Expr<'_>, count: &hir::AnonConst) {
        self.ibox(INDENT_UNIT);
        self.s.word("[");
        self.print_expr(element);
        self.word_space(";");
        self.print_anon_const(count);
        self.s.word("]");
        self.end()
    }
fn print_expr_struct(
&mut self,
qpath: &hir::QPath<'_>,
fields: &[hir::ExprField<'_>],
wth: &Option<&hir::Expr<'_>>,
) {
self.print_qpath(qpath, true);
self.s.word("{");
self.commasep_cmnt(
Consistent,
fields,
|s, field| {
s.ibox(INDENT_UNIT);
if !field.is_shorthand {
s.print_ident(field.ident);
s.word_space(":");
}
s.print_expr(&field.expr);
s.end()
},
|f| f.span,
);
match *wth {
Some(ref expr) => {
self.ibox(INDENT_UNIT);
if !fields.is_empty() {
self.s.word(",");
self.s.space();
}
self.s.word("..");
self.print_expr(&expr);
self.end();
}
_ => {
if !fields.is_empty() {
self.s.word(",")
}
}
}
self.s.word("}");
}
    /// Prints a tuple expression; a one-element tuple keeps its trailing
    /// comma (`(x,)`) so it stays a tuple.
    fn print_expr_tup(&mut self, exprs: &[hir::Expr<'_>]) {
        self.popen();
        self.commasep_exprs(Inconsistent, exprs);
        if exprs.len() == 1 {
            self.s.word(",");
        }
        self.pclose()
    }
fn print_expr_call(&mut self, func: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
let prec = match func.kind {
hir::ExprKind::Field(..) => parser::PREC_FORCE_PAREN,
_ => parser::PREC_POSTFIX,
};
self.print_expr_maybe_paren(func, prec);
self.print_call_post(args)
}
    /// Prints a method call `receiver.name::<args>(rest...)`. `args[0]` is
    /// the receiver; the remaining elements are the call arguments.
    fn print_expr_method_call(&mut self, segment: &hir::PathSegment<'_>, args: &[hir::Expr<'_>]) {
        let base_args = &args[1..];
        self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX);
        self.s.word(".");
        self.print_ident(segment.ident);
        let generic_args = segment.args();
        // Only print turbofish when there are explicit generic args/bindings.
        if !generic_args.args.is_empty() || !generic_args.bindings.is_empty() {
            self.print_generic_args(generic_args, segment.infer_args, true);
        }
        self.print_call_post(base_args)
    }
    /// Prints a binary expression `lhs op rhs`, parenthesizing operands
    /// according to the operator's precedence and fixity.
    fn print_expr_binary(&mut self, op: hir::BinOp, lhs: &hir::Expr<'_>, rhs: &hir::Expr<'_>) {
        let assoc_op = bin_op_to_assoc_op(op.node);
        let prec = assoc_op.precedence() as i8;
        let fixity = assoc_op.fixity();
        // The associating side may bind at its own precedence; the other
        // side needs one level more to force parens.
        let (left_prec, right_prec) = match fixity {
            Fixity::Left => (prec, prec + 1),
            Fixity::Right => (prec + 1, prec),
            Fixity::None => (prec + 1, prec + 1),
        };
        let left_prec = match (&lhs.kind, op.node) {
            // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is
            // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead
            // of `(x as i32) < ...`. We need to convince it _not_ to do that.
            (&hir::ExprKind::Cast { .. }, hir::BinOpKind::Lt | hir::BinOpKind::Shl) => {
                parser::PREC_FORCE_PAREN
            }
            _ => left_prec,
        };
        self.print_expr_maybe_paren(lhs, left_prec);
        self.s.space();
        self.word_space(op.node.as_str());
        self.print_expr_maybe_paren(rhs, right_prec)
    }
    /// Prints a unary expression: the operator followed by its operand.
    fn print_expr_unary(&mut self, op: hir::UnOp, expr: &hir::Expr<'_>) {
        self.s.word(op.as_str());
        self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
    }
    /// Prints a borrow expression: `&expr`, `&mut expr`, or a raw borrow
    /// (`&raw const expr` / `&raw mut expr`).
    fn print_expr_addr_of(
        &mut self,
        kind: hir::BorrowKind,
        mutability: hir::Mutability,
        expr: &hir::Expr<'_>,
    ) {
        self.s.word("&");
        match kind {
            hir::BorrowKind::Ref => self.print_mutability(mutability, false),
            hir::BorrowKind::Raw => {
                self.word_nbsp("raw");
                self.print_mutability(mutability, true);
            }
        }
        self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
    }
    /// Prints a literal via its token representation, after any preceding
    /// source comment.
    fn print_literal(&mut self, lit: &hir::Lit) {
        self.maybe_print_comment(lit.span.lo());
        self.word(lit.node.to_lit_token().to_string())
    }
    /// Prints the argument list of an `asm!` invocation: the template
    /// string, then each operand (`in`/`out`/`inout`/`const`/`sym` forms),
    /// then an `options(...)` entry if any options are set.
    fn print_inline_asm(&mut self, asm: &hir::InlineAsm<'_>) {
        // Uniform wrapper so template, operands, and options can share one
        // comma-separated list.
        enum AsmArg<'a> {
            Template(String),
            Operand(&'a hir::InlineAsmOperand<'a>),
            Options(ast::InlineAsmOptions),
        }
        let mut args = vec![];
        args.push(AsmArg::Template(ast::InlineAsmTemplatePiece::to_string(&asm.template)));
        args.extend(asm.operands.iter().map(|(o, _)| AsmArg::Operand(o)));
        if !asm.options.is_empty() {
            args.push(AsmArg::Options(asm.options));
        }
        self.popen();
        self.commasep(Consistent, &args, |s, arg| match arg {
            AsmArg::Template(template) => s.print_string(&template, ast::StrStyle::Cooked),
            AsmArg::Operand(op) => match op {
                hir::InlineAsmOperand::In { reg, expr } => {
                    s.word("in");
                    s.popen();
                    s.word(format!("{}", reg));
                    s.pclose();
                    s.space();
                    s.print_expr(expr);
                }
                hir::InlineAsmOperand::Out { reg, late, expr } => {
                    s.word(if *late { "lateout" } else { "out" });
                    s.popen();
                    s.word(format!("{}", reg));
                    s.pclose();
                    s.space();
                    // An `out(...) _` discards the output.
                    match expr {
                        Some(expr) => s.print_expr(expr),
                        None => s.word("_"),
                    }
                }
                hir::InlineAsmOperand::InOut { reg, late, expr } => {
                    s.word(if *late { "inlateout" } else { "inout" });
                    s.popen();
                    s.word(format!("{}", reg));
                    s.pclose();
                    s.space();
                    s.print_expr(expr);
                }
                hir::InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                    s.word(if *late { "inlateout" } else { "inout" });
                    s.popen();
                    s.word(format!("{}", reg));
                    s.pclose();
                    s.space();
                    s.print_expr(in_expr);
                    s.space();
                    s.word_space("=>");
                    match out_expr {
                        Some(out_expr) => s.print_expr(out_expr),
                        None => s.word("_"),
                    }
                }
                hir::InlineAsmOperand::Const { anon_const } => {
                    s.word("const");
                    s.space();
                    s.print_anon_const(anon_const);
                }
                hir::InlineAsmOperand::Sym { expr } => {
                    s.word("sym");
                    s.space();
                    s.print_expr(expr);
                }
            },
            AsmArg::Options(opts) => {
                // Render each set flag by name inside `options(...)`.
                s.word("options");
                s.popen();
                let mut options = vec![];
                if opts.contains(ast::InlineAsmOptions::PURE) {
                    options.push("pure");
                }
                if opts.contains(ast::InlineAsmOptions::NOMEM) {
                    options.push("nomem");
                }
                if opts.contains(ast::InlineAsmOptions::READONLY) {
                    options.push("readonly");
                }
                if opts.contains(ast::InlineAsmOptions::PRESERVES_FLAGS) {
                    options.push("preserves_flags");
                }
                if opts.contains(ast::InlineAsmOptions::NORETURN) {
                    options.push("noreturn");
                }
                if opts.contains(ast::InlineAsmOptions::NOSTACK) {
                    options.push("nostack");
                }
                if opts.contains(ast::InlineAsmOptions::ATT_SYNTAX) {
                    options.push("att_syntax");
                }
                s.commasep(Inconsistent, &options, |s, &opt| {
                    s.word(opt);
                });
                s.pclose();
            }
        });
        self.pclose();
    }
pub fn print_expr(&mut self, expr: &hir::Expr<'_>) {
self.maybe_print_comment(expr.span.lo());
self.print_outer_attributes(self.attrs(expr.hir_id));
self.ibox(INDENT_UNIT);
self.ann.pre(self, AnnNode::Expr(expr));
match expr.kind {
hir::ExprKind::Box(ref expr) => {
self.word_space("box");
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX);
}
hir::ExprKind::Array(ref exprs) => {
self.print_expr_vec(exprs);
}
hir::ExprKind::ConstBlock(ref anon_const) => {
self.print_expr_anon_const(anon_const);
}
hir::ExprKind::Repeat(ref element, ref count) => {
self.print_expr_repeat(&element, count);
}
hir::ExprKind::Struct(ref qpath, fields, ref wth) => {
self.print_expr_struct(qpath, fields, wth);
}
hir::ExprKind::Tup(ref exprs) => {
self.print_expr_tup(exprs);
}
hir::ExprKind::Call(ref func, ref args) => {
self.print_expr_call(&func, args);
}
hir::ExprKind::MethodCall(ref segment, _, ref args, _) => {
self.print_expr_method_call(segment, args);
}
hir::ExprKind::Binary(op, ref lhs, ref rhs) => {
self.print_expr_binary(op, &lhs, &rhs);
}
hir::ExprKind::Unary(op, ref expr) => {
self.print_expr_unary(op, &expr);
}
hir::ExprKind::AddrOf(k, m, ref expr) => {
self.print_expr_addr_of(k, m, &expr);
}
hir::ExprKind::Lit(ref lit) => {
self.print_literal(&lit);
}
hir::ExprKind::Cast(ref expr, ref ty) => {
let prec = AssocOp::As.precedence() as i8;
self.print_expr_maybe_paren(&expr, prec);
self.s.space();
self.word_space("as");
self.print_type(&ty);
}
hir::ExprKind::Type(ref expr, ref ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(&expr, prec);
self.word_space(":");
self.print_type(&ty);
}
hir::ExprKind::DropTemps(ref init) => {
// Print `{`:
self.cbox(INDENT_UNIT);
self.ibox(0);
self.bopen();
// Print `let _t = $init;`:
let temp = Ident::from_str("_t");
self.print_local(Some(init), |this| this.print_ident(temp));
self.s.word(";");
// Print `_t`:
self.space_if_not_bol();
self.print_ident(temp);
// Print `}`:
self.bclose_maybe_open(expr.span, true);
}
hir::ExprKind::If(ref test, ref blk, ref elseopt) => {
self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e));
}
hir::ExprKind::Loop(ref blk, opt_label, _, _) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
}
self.head("loop");
self.s.space();
self.print_block(&blk);
}
hir::ExprKind::Match(ref expr, arms, _) => {
self.cbox(INDENT_UNIT);
self.ibox(INDENT_UNIT);
self.word_nbsp("match");
self.print_expr_as_cond(&expr);
self.s.space();
self.bopen();
for arm in arms {
self.print_arm(arm);
}
self.bclose(expr.span);
}
hir::ExprKind::Closure(capture_clause, ref decl, body, _fn_decl_span, _gen) => {
self.print_capture_clause(capture_clause);
self.print_closure_params(&decl, body);
self.s.space();
// This is a bare expression.
self.ann.nested(self, Nested::Body(body));
self.end(); // need to close a box
// A box will be closed by `print_expr`, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
self.ibox(0);
}
hir::ExprKind::Block(ref blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
}
// containing cbox, will be closed by print-block at `}`
self.cbox(INDENT_UNIT);
// head-box, will be closed by print-block after `{`
self.ibox(0);
self.print_block(&blk);
}
hir::ExprKind::Assign(ref lhs, ref rhs, _) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(&lhs, prec + 1);
self.s.space();
self.word_space("=");
self.print_expr_maybe_paren(&rhs, prec);
}
hir::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(&lhs, prec + 1);
self.s.space();
self.s.word(op.node.as_str());
self.word_space("=");
self.print_expr_maybe_paren(&rhs, prec);
}
hir::ExprKind::Field(ref expr, ident) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.s.word(".");
self.print_ident(ident);
}
hir::ExprKind::Index(ref expr, ref index) => {
self.print_expr_maybe_paren(&expr, parser::PREC_POSTFIX);
self.s.word("[");
self.print_expr(&index);
self.s.word("]");
}
hir::ExprKind::Path(ref qpath) => self.print_qpath(qpath, true),
hir::ExprKind::Break(destination, ref opt_expr) => {
self.s.word("break");
self.s.space();
if let Some(label) = destination.label {
self.print_ident(label.ident);
self.s.space();
}
if let Some(ref expr) = *opt_expr {
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
self.s.space();
}
}
hir::ExprKind::Continue(destination) => {
self.s.word("continue");
self.s.space();
if let Some(label) = destination.label {
self.print_ident(label.ident);
self.s.space()
}
}
hir::ExprKind::Ret(ref result) => {
self.s.word("return");
if let Some(ref expr) = *result {
self.s.word(" ");
self.print_expr_maybe_paren(&expr, parser::PREC_JUMP);
}
}
hir::ExprKind::InlineAsm(ref asm) => {
self.word("asm!");
self.print_inline_asm(asm);
}
hir::ExprKind::LlvmInlineAsm(ref a) => {
let i = &a.inner;
self.s.word("llvm_asm!");
self.popen();
self.print_symbol(i.asm, i.asm_str_style);
self.word_space(":");
let mut out_idx = 0;
self.commasep(Inconsistent, &i.outputs, |s, out| {
let constraint = out.constraint.as_str();
let mut ch = constraint.chars();
match ch.next() {
Some('=') if out.is_rw => {
s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked)
}
_ => s.print_string(&constraint, ast::StrStyle::Cooked),
}
s.popen();
s.print_expr(&a.outputs_exprs[out_idx]);
s.pclose();
out_idx += 1;
});
self.s.space();
self.word_space(":");
let mut in_idx = 0;
self.commasep(Inconsistent, &i.inputs, |s, &co| {
s.print_symbol(co, ast::StrStyle::Cooked);
s.popen();
s.print_expr(&a.inputs_exprs[in_idx]);
s.pclose();
in_idx += 1;
});
self.s.space();
self.word_space(":");
self.commasep(Inconsistent, &i.clobbers, |s, &co| {
s.print_symbol(co, ast::StrStyle::Cooked);
});
let mut options = vec![];
if i.volatile {
options.push("volatile");
}
if i.alignstack {
options.push("alignstack");
}
if i.dialect == ast::LlvmAsmDialect::Intel {
options.push("intel");
}
if !options.is_empty() {
self.s.space();
self.word_space(":");
self.commasep(Inconsistent, &options, |s, &co| {
s.print_string(co, ast::StrStyle::Cooked);
});
}
self.pclose();
}
hir::ExprKind::Yield(ref expr, _) => {
self.word_space("yield");
self.print_expr_maybe_paren(&expr, parser::PREC_JUMP);
}
hir::ExprKind::Err => {
self.popen();
self.s.word("/*ERROR*/");
self.pclose();
}
}
self.ann.post(self, AnnNode::Expr(expr));
self.end()
}
/// Prints the declaration part of a `let`: the pattern, plus `: <type>`
/// when an explicit ascription is present. The `let` keyword and the
/// initializer are printed by the caller.
pub fn print_local_decl(&mut self, loc: &hir::Local<'_>) {
    self.print_pat(&loc.pat);
    if let Some(ref ty) = loc.ty {
        self.word_space(":");
        self.print_type(&ty);
    }
}
/// Prints a bare symbol by wrapping it in an `Ident` with a dummy span.
pub fn print_name(&mut self, name: Symbol) {
    self.print_ident(Ident::with_dummy_span(name))
}
/// Prints a resolved path, emitting `::` between segments and each
/// segment's generic arguments. `colons_before_params` selects turbofish
/// form (`::<...>`) for the argument lists.
pub fn print_path(&mut self, path: &hir::Path<'_>, colons_before_params: bool) {
    self.maybe_print_comment(path.span.lo());
    for (i, segment) in path.segments.iter().enumerate() {
        if i > 0 {
            self.s.word("::")
        }
        // `PathRoot` is the implicit root of absolute paths; it has no text.
        if segment.ident.name != kw::PathRoot {
            self.print_ident(segment.ident);
            self.print_generic_args(segment.args(), segment.infer_args, colons_before_params);
        }
    }
}
/// Prints a single path segment (identifier plus generic arguments),
/// skipping the textless implicit `PathRoot` segment.
pub fn print_path_segment(&mut self, segment: &hir::PathSegment<'_>) {
    if segment.ident.name != kw::PathRoot {
        self.print_ident(segment.ident);
        self.print_generic_args(segment.args(), segment.infer_args, false);
    }
}
/// Prints a (possibly qualified) path.
///
/// Handles the three HIR forms: a plain resolved path, a fully qualified
/// path (`<T as Trait>::item`), and a type-relative path (`<T>::item`);
/// `LangItem` paths render as their `#[lang = "..."]` attribute text.
pub fn print_qpath(&mut self, qpath: &hir::QPath<'_>, colons_before_params: bool) {
    match *qpath {
        hir::QPath::Resolved(None, ref path) => self.print_path(path, colons_before_params),
        hir::QPath::Resolved(Some(ref qself), ref path) => {
            // `<Type as Trait::Path>::last_segment` — all but the final
            // segment go inside the angle brackets.
            self.s.word("<");
            self.print_type(qself);
            self.s.space();
            self.word_space("as");
            for (i, segment) in path.segments[..path.segments.len() - 1].iter().enumerate() {
                if i > 0 {
                    self.s.word("::")
                }
                if segment.ident.name != kw::PathRoot {
                    self.print_ident(segment.ident);
                    self.print_generic_args(
                        segment.args(),
                        segment.infer_args,
                        colons_before_params,
                    );
                }
            }
            self.s.word(">");
            self.s.word("::");
            let item_segment = path.segments.last().unwrap();
            self.print_ident(item_segment.ident);
            self.print_generic_args(
                item_segment.args(),
                item_segment.infer_args,
                colons_before_params,
            )
        }
        hir::QPath::TypeRelative(ref qself, ref item_segment) => {
            // If we've got a compound-qualified-path, let's push an additional pair of angle
            // brackets, so that we pretty-print `<<A::B>::C>` as `<A::B>::C`, instead of just
            // `A::B::C` (since the latter could be ambiguous to the user)
            if let hir::TyKind::Path(hir::QPath::Resolved(None, _)) = &qself.kind {
                self.print_type(qself);
            } else {
                self.s.word("<");
                self.print_type(qself);
                self.s.word(">");
            }
            self.s.word("::");
            self.print_ident(item_segment.ident);
            self.print_generic_args(
                item_segment.args(),
                item_segment.infer_args,
                colons_before_params,
            )
        }
        hir::QPath::LangItem(lang_item, span) => {
            self.s.word("#[lang = \"");
            self.print_ident(Ident::new(lang_item.name(), span));
            self.s.word("\"]");
        }
    }
}
/// Prints a generic argument list.
///
/// Parenthesized (`Fn(A, B) -> C`) sugar is printed directly; otherwise
/// the args are printed in angle brackets, eliding lifetimes when every
/// lifetime argument is elided, followed by associated-type bindings.
/// `colons_before_params` selects the turbofish `::<` opener.
fn print_generic_args(
    &mut self,
    generic_args: &hir::GenericArgs<'_>,
    infer_args: bool,
    colons_before_params: bool,
) {
    if generic_args.parenthesized {
        // `Fn(inputs...) -> output` sugar; the output lives in the first binding.
        self.s.word("(");
        self.commasep(Inconsistent, generic_args.inputs(), |s, ty| s.print_type(&ty));
        self.s.word(")");
        self.space_if_not_bol();
        self.word_space("->");
        self.print_type(generic_args.bindings[0].ty());
    } else {
        let start = if colons_before_params { "::<" } else { "<" };
        // Lazily opens the bracket on first use so empty lists print nothing.
        let empty = Cell::new(true);
        let start_or_comma = |this: &mut Self| {
            if empty.get() {
                empty.set(false);
                this.s.word(start)
            } else {
                this.word_space(",")
            }
        };
        let mut nonelided_generic_args: bool = false;
        let elide_lifetimes = generic_args.args.iter().all(|arg| match arg {
            GenericArg::Lifetime(lt) => lt.is_elided(),
            _ => {
                nonelided_generic_args = true;
                true
            }
        });
        if nonelided_generic_args {
            start_or_comma(self);
            self.commasep(
                Inconsistent,
                &generic_args.args,
                |s, generic_arg| match generic_arg {
                    GenericArg::Lifetime(lt) if !elide_lifetimes => s.print_lifetime(lt),
                    GenericArg::Lifetime(_) => {}
                    GenericArg::Type(ty) => s.print_type(ty),
                    GenericArg::Const(ct) => s.print_anon_const(&ct.value),
                },
            );
        }
        // FIXME(eddyb): this would leak into error messages (e.g.,
        // "non-exhaustive patterns: `Some::<..>(_)` not covered").
        if infer_args && false {
            start_or_comma(self);
            self.s.word("..");
        }
        for binding in generic_args.bindings.iter() {
            start_or_comma(self);
            self.print_ident(binding.ident);
            self.print_generic_args(binding.gen_args, false, false);
            self.s.space();
            // Fix: inspect each binding's own kind. Previously this matched
            // on `generic_args.bindings[0].kind`, so with multiple bindings
            // (e.g. `<Item = T, Other: Bound>`) every binding was printed
            // using the first binding's kind.
            match binding.kind {
                hir::TypeBindingKind::Equality { ref ty } => {
                    self.word_space("=");
                    self.print_type(ty);
                }
                hir::TypeBindingKind::Constraint { bounds } => {
                    self.print_bounds(":", bounds);
                }
            }
        }
        if !empty.get() {
            self.s.word(">")
        }
    }
}
/// Prints a pattern.
pub fn print_pat(&mut self, pat: &hir::Pat<'_>) {
    self.maybe_print_comment(pat.span.lo());
    self.ann.pre(self, AnnNode::Pat(pat));
    // Pat isn't normalized, but the beauty of it
    // is that it doesn't matter
    match pat.kind {
        PatKind::Wild => self.s.word("_"),
        PatKind::Binding(binding_mode, _, ident, ref sub) => {
            // `ref` / `ref mut` / `mut` prefix, the name, then an
            // optional `@ subpattern`.
            match binding_mode {
                hir::BindingAnnotation::Ref => {
                    self.word_nbsp("ref");
                    self.print_mutability(hir::Mutability::Not, false);
                }
                hir::BindingAnnotation::RefMut => {
                    self.word_nbsp("ref");
                    self.print_mutability(hir::Mutability::Mut, false);
                }
                hir::BindingAnnotation::Unannotated => {}
                hir::BindingAnnotation::Mutable => {
                    self.word_nbsp("mut");
                }
            }
            self.print_ident(ident);
            if let Some(ref p) = *sub {
                self.s.word("@");
                self.print_pat(&p);
            }
        }
        PatKind::TupleStruct(ref qpath, ref elts, ddpos) => {
            // `Path(a, b, .., y, z)`; `ddpos` is the index of the `..`.
            self.print_qpath(qpath, true);
            self.popen();
            if let Some(ddpos) = ddpos {
                self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p));
                if ddpos != 0 {
                    self.word_space(",");
                }
                self.s.word("..");
                if ddpos != elts.len() {
                    self.s.word(",");
                    self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p));
                }
            } else {
                self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p));
            }
            self.pclose();
        }
        PatKind::Path(ref qpath) => {
            self.print_qpath(qpath, true);
        }
        PatKind::Struct(ref qpath, ref fields, etc) => {
            // `Path { field: pat, .. }`; shorthand fields print the pattern only.
            self.print_qpath(qpath, true);
            self.nbsp();
            self.word_space("{");
            self.commasep_cmnt(
                Consistent,
                &fields[..],
                |s, f| {
                    s.cbox(INDENT_UNIT);
                    if !f.is_shorthand {
                        s.print_ident(f.ident);
                        s.word_nbsp(":");
                    }
                    s.print_pat(&f.pat);
                    s.end()
                },
                |f| f.pat.span,
            );
            if etc {
                if !fields.is_empty() {
                    self.word_space(",");
                }
                self.s.word("..");
            }
            self.s.space();
            self.s.word("}");
        }
        PatKind::Or(ref pats) => {
            self.strsep("|", true, Inconsistent, &pats[..], |s, p| s.print_pat(&p));
        }
        PatKind::Tuple(ref elts, ddpos) => {
            self.popen();
            if let Some(ddpos) = ddpos {
                self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p));
                if ddpos != 0 {
                    self.word_space(",");
                }
                self.s.word("..");
                if ddpos != elts.len() {
                    self.s.word(",");
                    self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p));
                }
            } else {
                self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p));
                // One-element tuples need the trailing comma: `(p,)`.
                if elts.len() == 1 {
                    self.s.word(",");
                }
            }
            self.pclose();
        }
        PatKind::Box(ref inner) => {
            // Parenthesize a range subpattern so the output is unambiguous.
            let is_range_inner = matches!(inner.kind, PatKind::Range(..));
            self.s.word("box ");
            if is_range_inner {
                self.popen();
            }
            self.print_pat(&inner);
            if is_range_inner {
                self.pclose();
            }
        }
        PatKind::Ref(ref inner, mutbl) => {
            let is_range_inner = matches!(inner.kind, PatKind::Range(..));
            self.s.word("&");
            self.s.word(mutbl.prefix_str());
            if is_range_inner {
                self.popen();
            }
            self.print_pat(&inner);
            if is_range_inner {
                self.pclose();
            }
        }
        PatKind::Lit(ref e) => self.print_expr(&e),
        PatKind::Range(ref begin, ref end, ref end_kind) => {
            if let Some(expr) = begin {
                self.print_expr(expr);
                self.s.space();
            }
            match *end_kind {
                RangeEnd::Included => self.s.word("..."),
                RangeEnd::Excluded => self.s.word(".."),
            }
            if let Some(expr) = end {
                self.print_expr(expr);
            }
        }
        PatKind::Slice(ref before, ref slice, ref after) => {
            // `[a, b, mid @ .., y, z]`; a wildcard middle prints as bare `..`.
            self.s.word("[");
            self.commasep(Inconsistent, &before[..], |s, p| s.print_pat(&p));
            if let Some(ref p) = *slice {
                if !before.is_empty() {
                    self.word_space(",");
                }
                if let PatKind::Wild = p.kind {
                    // Print nothing.
                } else {
                    self.print_pat(&p);
                }
                self.s.word("..");
                if !after.is_empty() {
                    self.word_space(",");
                }
            }
            self.commasep(Inconsistent, &after[..], |s, p| s.print_pat(&p));
            self.s.word("]");
        }
    }
    self.ann.post(self, AnnNode::Pat(pat))
}
/// Prints a function parameter: its outer attributes, then its pattern.
pub fn print_param(&mut self, arg: &hir::Param<'_>) {
    self.print_outer_attributes(self.attrs(arg.hir_id));
    self.print_pat(&arg.pat);
}
/// Prints one `match` arm: attributes, pattern, optional guard, `=>`, body.
/// Block bodies print without a trailing comma (except user-written
/// `unsafe` blocks); expression bodies always get one.
pub fn print_arm(&mut self, arm: &hir::Arm<'_>) {
    // I have no idea why this check is necessary, but here it
    // is :(
    if self.attrs(arm.hir_id).is_empty() {
        self.s.space();
    }
    self.cbox(INDENT_UNIT);
    self.ann.pre(self, AnnNode::Arm(arm));
    self.ibox(0);
    self.print_outer_attributes(&self.attrs(arm.hir_id));
    self.print_pat(&arm.pat);
    self.s.space();
    if let Some(ref g) = arm.guard {
        match g {
            hir::Guard::If(e) => {
                self.word_space("if");
                self.print_expr(&e);
                self.s.space();
            }
            hir::Guard::IfLet(pat, e) => {
                self.word_nbsp("if");
                self.word_nbsp("let");
                self.print_pat(&pat);
                self.s.space();
                self.word_space("=");
                self.print_expr(&e);
                self.s.space();
            }
        }
    }
    self.word_space("=>");
    match arm.body.kind {
        hir::ExprKind::Block(ref blk, opt_label) => {
            if let Some(label) = opt_label {
                self.print_ident(label.ident);
                self.word_space(":");
            }
            // the block will close the pattern's ibox
            self.print_block_unclosed(&blk);
            // If it is a user-provided unsafe block, print a comma after it
            if let hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::UserProvided) = blk.rules
            {
                self.s.word(",");
            }
        }
        _ => {
            self.end(); // close the ibox for the pattern
            self.print_expr(&arm.body);
            self.s.word(",");
        }
    }
    self.ann.post(self, AnnNode::Arm(arm));
    self.end() // close enclosing cbox
}
/// Prints a function signature: header, name, generics, parameter list,
/// return type, and where-clause.
///
/// Parameter patterns come either from `arg_names` (trait methods without
/// bodies) or from the body identified by `body_id` — never both.
pub fn print_fn(
    &mut self,
    decl: &hir::FnDecl<'_>,
    header: hir::FnHeader,
    name: Option<Symbol>,
    generics: &hir::Generics<'_>,
    vis: &hir::Visibility<'_>,
    arg_names: &[Ident],
    body_id: Option<hir::BodyId>,
) {
    self.print_fn_header_info(header, vis);
    if let Some(name) = name {
        self.nbsp();
        self.print_name(name);
    }
    self.print_generic_params(&generics.params);
    self.popen();
    let mut i = 0;
    // Make sure we aren't supplied *both* `arg_names` and `body_id`.
    assert!(arg_names.is_empty() || body_id.is_none());
    self.commasep(Inconsistent, &decl.inputs, |s, ty| {
        s.ibox(INDENT_UNIT);
        if let Some(arg_name) = arg_names.get(i) {
            s.s.word(arg_name.to_string());
            s.s.word(":");
            s.s.space();
        } else if let Some(body_id) = body_id {
            // Fetch the i-th parameter pattern from the function body.
            s.ann.nested(s, Nested::BodyParamPat(body_id, i));
            s.s.word(":");
            s.s.space();
        }
        i += 1;
        s.print_type(ty);
        s.end()
    });
    if decl.c_variadic {
        self.s.word(", ...");
    }
    self.pclose();
    self.print_fn_output(decl);
    self.print_where_clause(&generics.where_clause)
}
/// Prints a closure's `|param, ...|` list and, when explicit, its `-> Ty`
/// return. Parameters with inferred types print the pattern only.
fn print_closure_params(&mut self, decl: &hir::FnDecl<'_>, body_id: hir::BodyId) {
    self.s.word("|");
    let mut i = 0;
    self.commasep(Inconsistent, &decl.inputs, |s, ty| {
        s.ibox(INDENT_UNIT);
        // Closure parameter patterns live in the body.
        s.ann.nested(s, Nested::BodyParamPat(body_id, i));
        i += 1;
        if let hir::TyKind::Infer = ty.kind {
            // Print nothing.
        } else {
            s.s.word(":");
            s.s.space();
            s.print_type(ty);
        }
        s.end();
    });
    self.s.word("|");
    if let hir::FnRetTy::DefaultReturn(..) = decl.output {
        return;
    }
    self.space_if_not_bol();
    self.word_space("->");
    match decl.output {
        hir::FnRetTy::Return(ref ty) => {
            self.print_type(&ty);
            self.maybe_print_comment(ty.span.lo())
        }
        hir::FnRetTy::DefaultReturn(..) => unreachable!(),
    }
}
/// Prints `move ` for by-value closure captures; the default by-reference
/// mode prints nothing.
pub fn print_capture_clause(&mut self, capture_clause: hir::CaptureBy) {
    match capture_clause {
        hir::CaptureBy::Value => self.word_space("move"),
        hir::CaptureBy::Ref => {}
    }
}
/// Prints a bound list, `+`-separated, preceded by `prefix` (e.g. `:`).
/// An empty iterator prints nothing at all.
pub fn print_bounds<'b>(
    &mut self,
    prefix: &'static str,
    bounds: impl IntoIterator<Item = &'b hir::GenericBound<'b>>,
) {
    let mut first = true;
    for bound in bounds {
        if first {
            self.s.word(prefix);
        }
        // No space after an empty prefix; otherwise separate with nbsp.
        if !(first && prefix.is_empty()) {
            self.nbsp();
        }
        if first {
            first = false;
        } else {
            self.word_space("+");
        }
        match bound {
            GenericBound::Trait(tref, modifier) => {
                // `?Trait` relaxed bounds.
                if modifier == &TraitBoundModifier::Maybe {
                    self.s.word("?");
                }
                self.print_poly_trait_ref(tref);
            }
            GenericBound::LangItemTrait(lang_item, span, ..) => {
                self.s.word("#[lang = \"");
                self.print_ident(Ident::new(lang_item.name(), *span));
                self.s.word("\"]");
            }
            GenericBound::Outlives(lt) => {
                self.print_lifetime(lt);
            }
        }
    }
}
/// Prints `<param, ...>` for a non-empty generic parameter list; prints
/// nothing when the list is empty.
pub fn print_generic_params(&mut self, generic_params: &[GenericParam<'_>]) {
    if !generic_params.is_empty() {
        self.s.word("<");
        self.commasep(Inconsistent, generic_params, |s, param| s.print_generic_param(param));
        self.s.word(">");
    }
}
/// Prints one generic parameter declaration: lifetime (with outlives
/// bounds), type (with bounds and default), or `const` (with type and
/// default).
pub fn print_generic_param(&mut self, param: &GenericParam<'_>) {
    if let GenericParamKind::Const { .. } = param.kind {
        self.word_space("const");
    }
    self.print_ident(param.name.ident());
    match param.kind {
        GenericParamKind::Lifetime { .. } => {
            // First bound is introduced by `:`, the rest joined with `+`.
            let mut sep = ":";
            for bound in param.bounds {
                match bound {
                    GenericBound::Outlives(ref lt) => {
                        self.s.word(sep);
                        self.print_lifetime(lt);
                        sep = "+";
                    }
                    // Lifetime params can only carry outlives bounds.
                    _ => panic!(),
                }
            }
        }
        GenericParamKind::Type { ref default, .. } => {
            self.print_bounds(":", param.bounds);
            if let Some(default) = default {
                self.s.space();
                self.word_space("=");
                self.print_type(&default)
            }
        }
        GenericParamKind::Const { ref ty, ref default } => {
            self.word_space(":");
            self.print_type(ty);
            if let Some(ref default) = default {
                self.s.space();
                self.word_space("=");
                self.print_anon_const(&default)
            }
        }
    }
}
/// Prints a lifetime by its name (e.g. `'a`).
pub fn print_lifetime(&mut self, lifetime: &hir::Lifetime) {
    self.print_ident(lifetime.name.ident())
}
/// Prints ` where P1, P2, ...` for a non-empty where-clause; prints
/// nothing when there are no predicates.
pub fn print_where_clause(&mut self, where_clause: &hir::WhereClause<'_>) {
    if where_clause.predicates.is_empty() {
        return;
    }
    self.s.space();
    self.word_space("where");
    for (i, predicate) in where_clause.predicates.iter().enumerate() {
        if i != 0 {
            self.word_space(",");
        }
        match predicate {
            hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
                bound_generic_params,
                bounded_ty,
                bounds,
                ..
            }) => {
                // `for<'a> Ty: Bounds`
                self.print_formal_generic_params(bound_generic_params);
                self.print_type(&bounded_ty);
                self.print_bounds(":", *bounds);
            }
            hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
                lifetime,
                bounds,
                ..
            }) => {
                // `'a: 'b ...` outlives predicate.
                self.print_lifetime(lifetime);
                self.s.word(":");
                for (i, bound) in bounds.iter().enumerate() {
                    match bound {
                        GenericBound::Outlives(lt) => {
                            self.print_lifetime(lt);
                        }
                        // Region predicates may only carry outlives bounds.
                        _ => panic!(),
                    }
                    // NOTE(review): separator is emitted *after* each bound
                    // past the first and uses ":" rather than "+" — looks
                    // suspicious; confirm against upstream before changing.
                    if i != 0 {
                        self.s.word(":");
                    }
                }
            }
            hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
                lhs_ty, rhs_ty, ..
            }) => {
                // `Lhs = Rhs` equality predicate.
                self.print_type(lhs_ty);
                self.s.space();
                self.word_space("=");
                self.print_type(rhs_ty);
            }
        }
    }
}
/// Prints `mut ` for mutable; for immutable prints `const ` only when
/// `print_const` is set (raw-pointer position), otherwise nothing.
pub fn print_mutability(&mut self, mutbl: hir::Mutability, print_const: bool) {
    match mutbl {
        hir::Mutability::Mut => self.word_nbsp("mut"),
        hir::Mutability::Not => {
            if print_const {
                self.word_nbsp("const")
            }
        }
    }
}
/// Prints a mutability-qualified type (`mut Ty` / `const Ty` / `Ty`),
/// as used after `&` or `*`.
pub fn print_mt(&mut self, mt: &hir::MutTy<'_>, print_const: bool) {
    self.print_mutability(mt.mutbl, print_const);
    self.print_type(&mt.ty)
}
/// Prints `-> Ty` for an explicit return type; the implicit unit return
/// prints nothing.
pub fn print_fn_output(&mut self, decl: &hir::FnDecl<'_>) {
    if let hir::FnRetTy::DefaultReturn(..) = decl.output {
        return;
    }
    self.space_if_not_bol();
    self.ibox(INDENT_UNIT);
    self.word_space("->");
    match decl.output {
        hir::FnRetTy::DefaultReturn(..) => unreachable!(),
        hir::FnRetTy::Return(ref ty) => self.print_type(&ty),
    }
    self.end();
    if let hir::FnRetTy::Return(ref output) = decl.output {
        self.maybe_print_comment(output.span.lo())
    }
}
/// Prints a function-pointer type (e.g. `for<'a> unsafe extern "C" fn(...)`)
/// by delegating to `print_fn` with an empty generics/where-clause and an
/// inherited visibility.
pub fn print_ty_fn(
    &mut self,
    abi: Abi,
    unsafety: hir::Unsafety,
    decl: &hir::FnDecl<'_>,
    name: Option<Symbol>,
    generic_params: &[hir::GenericParam<'_>],
    arg_names: &[Ident],
) {
    self.ibox(INDENT_UNIT);
    // Higher-ranked binder: `for<'a, ...>`.
    if !generic_params.is_empty() {
        self.s.word("for");
        self.print_generic_params(generic_params);
    }
    let generics = hir::Generics {
        params: &[],
        where_clause: hir::WhereClause { predicates: &[], span: rustc_span::DUMMY_SP },
        span: rustc_span::DUMMY_SP,
    };
    self.print_fn(
        decl,
        hir::FnHeader {
            unsafety,
            abi,
            constness: hir::Constness::NotConst,
            asyncness: hir::IsAsync::NotAsync,
        },
        name,
        &generics,
        &Spanned { span: rustc_span::DUMMY_SP, node: hir::VisibilityKind::Inherited },
        arg_names,
        None,
    );
    self.end();
}
/// Prints the source comment that trails `span`, if the comment stream
/// has one before `next_pos`.
pub fn maybe_print_trailing_comment(
    &mut self,
    span: rustc_span::Span,
    next_pos: Option<BytePos>,
) {
    if let Some(cmnts) = self.comments() {
        if let Some(cmnt) = cmnts.trailing_comment(span, next_pos) {
            self.print_comment(&cmnt);
        }
    }
}
/// Flushes any comments left in the stream at end of output.
pub fn print_remaining_comments(&mut self) {
    // If there aren't any remaining comments, then we need to manually
    // make sure there is a line break at the end.
    if self.next_comment().is_none() {
        self.s.hardbreak();
    }
    while let Some(ref cmnt) = self.next_comment() {
        self.print_comment(cmnt)
    }
}
/// Prints the qualifiers preceding `fn`: visibility, `const`, `async`,
/// `unsafe`, and a non-Rust `extern "abi"`, ending with the `fn` keyword.
pub fn print_fn_header_info(&mut self, header: hir::FnHeader, vis: &hir::Visibility<'_>) {
    self.s.word(visibility_qualified(vis, ""));
    match header.constness {
        hir::Constness::NotConst => {}
        hir::Constness::Const => self.word_nbsp("const"),
    }
    match header.asyncness {
        hir::IsAsync::NotAsync => {}
        hir::IsAsync::Async => self.word_nbsp("async"),
    }
    self.print_unsafety(header.unsafety);
    // The default Rust ABI is implicit and not printed.
    if header.abi != Abi::Rust {
        self.word_nbsp("extern");
        self.word_nbsp(header.abi.to_string());
    }
    self.s.word("fn")
}
/// Prints `unsafe ` when applicable; normal safety prints nothing.
pub fn print_unsafety(&mut self, s: hir::Unsafety) {
    match s {
        hir::Unsafety::Normal => {}
        hir::Unsafety::Unsafe => self.word_nbsp("unsafe"),
    }
}
/// Prints `auto ` for auto traits; ordinary traits print nothing.
pub fn print_is_auto(&mut self, s: hir::IsAuto) {
    match s {
        hir::IsAuto::Yes => self.word_nbsp("auto"),
        hir::IsAuto::No => {}
    }
}
}
/// Does this expression require a trailing semicolon to be treated as a
/// statement?
///
/// Block-like expressions (`if`, `match`, plain blocks, and loops) may stand
/// alone as statements; everything else needs a `;`. The negation is used as
/// an early-bail-out in the parser so that, for instance,
///     if true {...} else {...}
///     |x| 5
/// isn't parsed as `(if true {...} else {...} | x) | 5`.
//
// Duplicated from `parse::classify`, but adapted for the HIR.
fn expr_requires_semi_to_be_stmt(e: &hir::Expr<'_>) -> bool {
    match e.kind {
        hir::ExprKind::If(..)
        | hir::ExprKind::Match(..)
        | hir::ExprKind::Block(..)
        | hir::ExprKind::Loop(..) => false,
        _ => true,
    }
}
/// Whether this statement must be followed by a semicolon.
///
/// `Semi` statements already carry their own semicolon, so they (like items)
/// report `false`; expression statements delegate to
/// [`expr_requires_semi_to_be_stmt`].
fn stmt_ends_with_semi(stmt: &hir::StmtKind<'_>) -> bool {
    match stmt {
        hir::StmtKind::Local(_) => true,
        hir::StmtKind::Expr(e) => expr_requires_semi_to_be_stmt(e),
        hir::StmtKind::Item(_) | hir::StmtKind::Semi(..) => false,
    }
}
/// Maps a HIR binary operator to the parser's `AssocOp`, which carries the
/// precedence information used when deciding whether to parenthesize.
fn bin_op_to_assoc_op(op: hir::BinOpKind) -> AssocOp {
    use crate::hir::BinOpKind::*;
    match op {
        Add => AssocOp::Add,
        Sub => AssocOp::Subtract,
        Mul => AssocOp::Multiply,
        Div => AssocOp::Divide,
        Rem => AssocOp::Modulus,
        And => AssocOp::LAnd,
        Or => AssocOp::LOr,
        BitXor => AssocOp::BitXor,
        BitAnd => AssocOp::BitAnd,
        BitOr => AssocOp::BitOr,
        Shl => AssocOp::ShiftLeft,
        Shr => AssocOp::ShiftRight,
        Eq => AssocOp::Equal,
        Lt => AssocOp::Less,
        Le => AssocOp::LessEqual,
        Ne => AssocOp::NotEqual,
        Ge => AssocOp::GreaterEqual,
        Gt => AssocOp::Greater,
    }
}
/// Expressions that syntactically contain an "exterior" struct literal, i.e., not surrounded by any
/// parens or other delimiters, e.g., `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and
/// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not.
fn contains_exterior_struct_lit(value: &hir::Expr<'_>) -> bool {
    match value.kind {
        hir::ExprKind::Struct(..) => true,
        hir::ExprKind::Assign(ref lhs, ref rhs, _)
        | hir::ExprKind::AssignOp(_, ref lhs, ref rhs)
        | hir::ExprKind::Binary(_, ref lhs, ref rhs) => {
            // `X { y: 1 } + X { y: 2 }`
            contains_exterior_struct_lit(&lhs) || contains_exterior_struct_lit(&rhs)
        }
        hir::ExprKind::Unary(_, ref x)
        | hir::ExprKind::Cast(ref x, _)
        | hir::ExprKind::Type(ref x, _)
        | hir::ExprKind::Field(ref x, _)
        | hir::ExprKind::Index(ref x, _) => {
            // `&X { y: 1 }, X { y: 1 }.y`
            contains_exterior_struct_lit(&x)
        }
        hir::ExprKind::MethodCall(.., ref exprs, _) => {
            // `X { y: 1 }.bar(...)` — the receiver is the first element.
            contains_exterior_struct_lit(&exprs[0])
        }
        _ => false,
    }
}
| 35.780763 | 100 | 0.468618 |
c182394a117b4489b7c9e928e956a96c94e86dea | 152 | mod axis;
pub use self::axis::Axis;
mod input;
pub use self::input::Input;
pub mod keyboard;
pub mod mouse;
mod button;
pub use self::button::Button;
| 13.818182 | 29 | 0.717105 |
1a42f31ff64379051f43a7219cc16ec8eee50911 | 965 | //! Checks that files that should be rejected are rejected
extern crate tempfile;
extern crate png_inflate_derive;
use ::std::path::Path;
use ::std::process::Command;
use tempfile::NamedTempFile;
use png_inflate_derive::generate_for_each_files;
const PROGRAM_EXE:&str = env!("CARGO_BIN_EXE_png_inflate");
generate_for_each_files!();
/// Runs the `png_inflate` binary on `infile` and asserts the file is
/// rejected: the process must exit non-zero and the output file must stay
/// empty (i.e. nothing was written before the error was detected).
fn test_one(infile:&Path) {
    // A fresh temp path for the would-be output file.
    let outfile = NamedTempFile::new().expect("");
    let outfile = outfile.into_temp_path();
    let output = Command::new(PROGRAM_EXE)
    .arg(&infile)
    .arg(&outfile)
    .output()
    .expect("failed to execute subprocess");
    assert!(!output.status.success(), "subprocess execution should not have been success\n\n-- stderr:\n{}\n", std::str::from_utf8(&output.stderr).expect(""));
    // TODO: check the message?
    assert!(outfile.metadata().expect("").len() == 0, "outfile was written to: {}", outfile.metadata().expect("").len());
}
// Generated by `generate_for_each_files!` above: instantiate `test_one` as a
// test case for every bad-magic and bad-checksum fixture file.
for_each_badmagic_file!(test_one);
for_each_badchecksum_file!(test_one);
| 31.129032 | 156 | 0.727461 |
56f44b28eb1b0e51f29de54269055e833583a499 | 5,000 | use gura::{
errors::Error,
object,
parser::{dump, parse, GuraType},
};
use std::f64::{INFINITY, NAN, NEG_INFINITY};
mod common;
/// Builds the `GuraType` document that `full.ura` is expected to parse to.
/// Kept in one place so both the parse and the dump round-trip tests can
/// compare against it. (NaN values are tested separately — see
/// `test_loads_nan` — because NaN breaks plain equality.)
fn get_expected() -> GuraType {
    object! {
        a_string: "test string",
        int1: 99,
        int2: 42,
        int3: 0,
        int4: -17,
        int5: 1000,
        int6: 5349221,
        int7: 5349221,
        hex1: 3735928559,
        hex2: 3735928559,
        hex3: 3735928559,
        oct1: 342391,
        oct2: 493,
        bin1: 214,
        flt1: 1.0,
        flt2: 3.1415,
        flt3: -0.01,
        flt4: 5e+22,
        flt5: 1e06,
        flt6: -2E-2,
        flt7: 6.626e-34,
        flt8: 224617.445991228,
        sf1: INFINITY,
        sf2: INFINITY,
        sf3: NEG_INFINITY,
        null: null,
        empty_single: {},
        bool1: true,
        bool2: false,
        1234: "1234",
        services: {
            nginx: {
                host: "127.0.0.1",
                port: 80
            },
            apache: {
                virtual_host: "10.10.10.4",
                port: 81
            }
        },
        integers: [1, 2, 3],
        colors: ["red", "yellow", "green"],
        nested_arrays_of_ints: [[1, 2], [3, 4, 5]],
        nested_mixed_array: [[1, 2], ["a", "b", "c"]],
        numbers: [0.1, 0.2, 0.5, 1, 2, 5],
        tango_singers: [
            {
                user1: {
                    name: "Carlos",
                    surname: "Gardel",
                    year_of_birth: 1890
                }
            }, {
                user2: {
                    name: "Aníbal",
                    surname: "Troilo",
                    year_of_birth: 1914
                }
            }
        ],
        integers2: [
            1, 2, 3
        ],
        integers3: [
            1,
            2
        ],
        my_server: {
            host: "127.0.0.1",
            empty_nested: {},
            port: 8080,
            native_auth: true
        },
        gura_is_cool: "Gura is cool"
    }
}
const PARENT_FOLDER: &str = "full";
#[test]
/// Tests all the common cases except NaNs
fn test_parse() {
    // Parses the full fixture and compares against the hand-built document.
    let parsed_data = common::get_file_content_parsed(PARENT_FOLDER, "full.ura").unwrap();
    assert_eq!(parsed_data, get_expected());
}
#[test]
/// Tests NaNs cases as they are an exceptional case
fn test_loads_nan() {
    let parsed_data = common::get_file_content_parsed(PARENT_FOLDER, "nan.ura").unwrap();
    // NOTE(review): relies on `GuraType`'s `PartialEq<f64>` treating NaN as
    // equal to NaN — plain `f64 == f64` would fail here; confirm upstream.
    for (_, value) in parsed_data.iter().unwrap() {
        assert_eq!(*value, NAN);
    }
}
#[test]
/// Tests dumps method consistency
fn test_dumps() {
    // Round-trip: parse -> dump -> parse must reproduce the expected tree.
    let parsed_data = common::get_file_content_parsed(PARENT_FOLDER, "full.ura").unwrap();
    let string_data = dump(&parsed_data);
    let new_parsed_data = parse(&string_data).unwrap();
    assert_eq!(new_parsed_data, get_expected());
}
#[test]
/// Tests dumps method result
fn test_dumps_result() {
    // The raw string below is both the input and the exact expected dump
    // output, so `dump(parse(s)) == s` verifies formatting byte-for-byte.
    let str = r##"foo: [
bar:
baz: [
far: [
faz: "foo"
],
far: "faz",
far: "faz"
],
[empty, empty, empty],
[
foo:
hi: "bar"
bye: [
foo: [
bar:
baz: [
far: [
faz: "foo"
],
far: "faz",
far: "faz"
],
[empty, empty, empty],
[
foo:
hi: "bar"
bye: []
]
]
]
]
]"##;
    let parsed = parse(&str).unwrap();
    let dumped = dump(&parsed);
    assert_eq!(str, dumped);
}
#[test]
/// Tests dumps method with NaNs values
fn test_dumps_nan() {
    // Round-trips the NaN fixture; every value must still compare as NaN.
    let parsed_data = common::get_file_content_parsed(PARENT_FOLDER, "nan.ura").unwrap();
    let string_data_nan = dump(&parsed_data);
    let new_parsed_data = parse(&string_data_nan).unwrap();
    for (_, value) in new_parsed_data.iter().unwrap() {
        assert_eq!(*value, NAN);
    }
}
#[test]
/// An empty input parses to an empty Gura document.
fn test_empty() {
    assert_eq!(parse(&"").unwrap(), object! {});
}
#[test]
/// Input that only defines variables still parses to an empty document.
fn test_empty_2() {
    assert_eq!(parse(&"$unused_var: 5").unwrap(), object! {});
}
#[test]
/// Keys containing dots are rejected with a parse error.
fn test_invalid_key() {
    assert_eq!(parse(&"with.dot: 5").unwrap_err().kind, Error::ParseError);
}
#[test]
/// Quoted keys are rejected with a parse error.
fn test_invalid_key_2() {
    assert_eq!(parse(&"\"with_quotes\": 5").unwrap_err().kind, Error::ParseError);
}
#[test]
/// Keys containing dashes are rejected with a parse error.
fn test_invalid_key_3() {
    assert_eq!(parse(&"with-dashes: 5").unwrap_err().kind, Error::ParseError);
}
| 24.752475 | 90 | 0.4756 |
1a3fbf77c133faee04fba3468a6f4417e095ede0 | 5,733 | extern crate serial_core as serial;
use serial::prelude::*;
use std::io;
// maximum rx buffer len: extended CAN frame with timestamp
const SLCAN_MTU: usize = "T1111222281122334455667788EA5F\r".len() + 1;
const SLCAN_CMD_LEN: usize = 1;
const SLCAN_SDD_ID_LEN: usize = 3;
const BELL: u8 = 0x07;
const CARRIAGE_RETURN: u8 = '\r' as u8;
const TRANSMIT_COMMAND: u8 = 't' as u8;
const HEX_LUT: &[u8] = "0123456789ABCDEF".as_bytes();
/// SLCAN `S<n>` bit-rate setup codes. Each variant's discriminant is the
/// ASCII digit byte sent to the adapter in the `S` command.
#[repr(u8)]
pub enum BitRate {
    Setup10Kbit = '0' as u8,
    Setup20Kbit = '1' as u8,
    Setup50Kbit = '2' as u8,
    Setup100Kbit = '3' as u8,
    Setup125Kbit = '4' as u8,
    Setup250Kbit = '5' as u8,
    Setup500Kbit = '6' as u8,
    Setup800Kbit = '7' as u8,
    Setup1Mbit = '8' as u8,
}
/// A standard-identifier CAN data frame.
pub struct CanFrame {
    /// CAN identifier (11-bit standard ID).
    pub id: u32,
    /// Data length code: number of valid bytes in `data` (0..=8).
    pub dlc: usize,
    /// Payload bytes; only the first `dlc` entries are meaningful.
    pub data: [u8; 8],
}
/// SLCAN (serial-line CAN) adapter driver over a serial port.
pub struct CanSocket<P: SerialPort> {
    // Underlying serial connection to the adapter.
    port: P,
    // Assembly buffer for the SLCAN line currently being received.
    rbuff: [u8; SLCAN_MTU],
    // Number of bytes currently held in `rbuff`.
    rcount: usize,
    // Set when the current line overflowed `rbuff`; the line is discarded
    // at the next terminator.
    error: bool,
}
/// Decodes a single ASCII hex digit (`0-9`, `a-f`, `A-F`) to its value.
///
/// Returns `Err(())` for any other byte. Uses the standard library's
/// `char::to_digit(16)` instead of the hand-rolled range ladder; the
/// accepted character set and results are identical.
fn hextou8(s: u8) -> Result<u8, ()> {
    (s as char).to_digit(16).map(|d| d as u8).ok_or(())
}
/// Decodes the first two bytes of `s` as a big-endian hex byte
/// (high nibble first). Errors if either byte is not a hex digit.
fn hex2tou8(s: &[u8]) -> Result<u8, ()> {
    let high = hextou8(s[0])?;
    let low = hextou8(s[1])?;
    Ok((high << 4) | low)
}
/// Decodes `len` hex byte pairs from `s` into a fixed 8-byte payload
/// buffer; unused trailing bytes stay zero.
fn unpack_data(s: &[u8], len: usize) -> Result<[u8; 8], ()> {
    let mut buf = [0u8; 8];
    for (i, slot) in buf.iter_mut().enumerate().take(len) {
        *slot = hex2tou8(&s[2 * i..])?;
    }
    Ok(buf)
}
/// Decodes a big-endian sequence of ASCII hex digits into a `u32`,
/// failing on the first non-hex byte.
fn hextou32(buf: &[u8]) -> Result<u32, ()> {
    buf.iter()
        .try_fold(0u32, |acc, &digit| Ok((acc << 4) | hextou8(digit)? as u32))
}
/// Returns the uppercase ASCII hex digit for the low nibble of `value`.
fn hexdigit(value: u32) -> u8 {
    HEX_LUT[(value & 0xF) as usize]
}
fn u32tohex3(value: u32) -> [u8; 3] {
[
hexdigit(value >> 8),
hexdigit(value >> 4),
hexdigit(value >> 0),
]
}
/// Hex-encodes a byte slice as uppercase ASCII, two digits per byte,
/// high nibble first.
fn bytestohex(data: &[u8]) -> Vec<u8> {
    let mut out = Vec::<u8>::with_capacity(2 * data.len());
    for &byte in data {
        out.extend_from_slice(&[hexdigit((byte >> 4) as u32), hexdigit(byte as u32)]);
    }
    out
}
impl CanFrame {
pub fn new(id: u32, dlc: usize, data: &[u8]) -> Self {
let mut copy = [u8::default(); 8];
copy[..data.len()].copy_from_slice(data);
Self {
id,
dlc,
data: copy,
}
}
}
impl std::fmt::Display for CanFrame {
    /// Formats the frame as `CanFrame{ id: .., dlc: .., data: [..] }`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "CanFrame{{ id: {}, dlc: {}, data: {:?} }}",
            self.id, self.dlc, self.data
        )
    }
}
impl<P: SerialPort> CanSocket<P> {
    /// Wraps an already-configured serial port in an SLCAN socket.
    pub fn new(port: P) -> Self {
        CanSocket {
            port,
            rbuff: [0; SLCAN_MTU],
            rcount: 0,
            error: false,
        }
    }

    /// Selects the bit rate (`S<n>\r`) and opens the CAN channel (`O\r`).
    pub fn open(&mut self, bitrate: BitRate) -> io::Result<()> {
        self.port.write(&['S' as u8, bitrate as u8, '\r' as u8])?;
        self.port.write(&['O' as u8, '\r' as u8])?;
        Ok(())
    }

    /// Closes the CAN channel (`C\r`); the serial port itself stays open.
    pub fn close(&mut self) -> io::Result<()> {
        self.port.write("C\r".as_bytes())?;
        Ok(())
    }

    /// Encodes and sends a standard data frame: `t<id:3><dlc:1><data…>\r`.
    ///
    /// Returns the byte count written to the port, or `InvalidInput` when
    /// `data` exceeds the 8-byte CAN payload limit.
    pub fn write(&mut self, id: u32, data: &[u8]) -> io::Result<usize> {
        let dlc = data.len();
        if dlc > 8 {
            return Err(io::Error::new(io::ErrorKind::InvalidInput, "data length"));
        }
        // 1 cmd + 3 id + 1 dlc + 2*dlc payload + 1 CR = 6 + 2*dlc bytes.
        let mut buf = Vec::<u8>::with_capacity(6 + 2 * dlc);
        buf.push('t' as u8);
        buf.extend_from_slice(&u32tohex3(id));
        buf.push(hexdigit(dlc as u32));
        buf.extend_from_slice(&bytestohex(data));
        buf.push('\r' as u8);
        self.port.write(buf.as_slice())
    }

    /// Reads serial bytes one at a time, assembling SLCAN lines, and
    /// returns the next successfully decoded frame.
    ///
    /// Lines end with CR (success) or BEL (adapter error). Overlong lines
    /// mark `self.error` and are dropped at the next terminator. Returns
    /// `WouldBlock` when no complete frame is available yet.
    pub fn read(&mut self) -> io::Result<CanFrame> {
        let mut buf = [0u8; 1];
        let mut len = self.port.read(&mut buf)?;
        while len == 1usize {
            let s = buf[0];
            if s == CARRIAGE_RETURN || s == BELL {
                // A frame line needs at least cmd + 3 id digits + dlc digit.
                let valid = !self.error && self.rcount > 4;
                self.error = false;
                self.rcount = 0;
                if valid {
                    return self.bump();
                }
            } else if !self.error {
                if self.rcount < SLCAN_MTU {
                    self.rbuff[self.rcount] = s;
                    self.rcount += 1;
                } else {
                    // Overflow: poison the line until its terminator.
                    self.error = true;
                }
            }
            len = self.port.read(&mut buf)?;
        }
        Err(io::Error::new(io::ErrorKind::WouldBlock, ""))
    }

    /// Decodes the completed line held in `rbuff` into a `CanFrame`.
    ///
    /// Only the standard-frame `t` command is handled; other commands
    /// yield `WouldBlock` so callers simply keep polling.
    fn bump(&mut self) -> io::Result<CanFrame> {
        let cmd = self.rbuff[0];
        match cmd {
            TRANSMIT_COMMAND => {
                let id =
                    match hextou32(&self.rbuff[SLCAN_CMD_LEN..SLCAN_CMD_LEN + SLCAN_SDD_ID_LEN]) {
                        Ok(value) => value,
                        Err(()) => return Err(io::Error::new(io::ErrorKind::WouldBlock, "")),
                    };
                // DLC is a single ASCII digit; 0x30 is '0'.
                let dlc = (self.rbuff[SLCAN_CMD_LEN + SLCAN_SDD_ID_LEN] - 0x30) as usize;
                if let Ok(data) =
                    unpack_data(&self.rbuff[SLCAN_CMD_LEN + SLCAN_SDD_ID_LEN + 1..], dlc)
                {
                    Ok(CanFrame { id, dlc, data })
                } else {
                    Err(io::Error::new(io::ErrorKind::InvalidData, ""))
                }
            }
            _ => Err(io::Error::new(io::ErrorKind::WouldBlock, "")),
        }
    }
}
#[cfg(test)]
mod tests {
    // Placeholder sanity test; no SLCAN-specific coverage yet.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
| 23.788382 | 98 | 0.471132 |
bf43c61a1c023d10e1bc65a29147e8edcf856711 | 4,492 | use super::input::{InputData, Item as InputItem};
use super::{Issue, IssueId, Link};
use crate::fetcher::IssueData;
use crate::query::Repo;
use crate::{RFC_REPO, RUSTC_REPO};
use semver::Version;
use serde::Serialize;
use std::collections::HashMap;
pub struct OutputData(pub HashMap<String, Vec<Item>>);
impl OutputData {
    /// Builds the output tree from raw input items, resolving issues
    /// against the fetched GitHub data and versions against the latest
    /// stable release via an internal `Builder`.
    pub fn from_input(input: InputData, issue_data: &IssueData, latest_stable: &Version) -> Self {
        let builder = Builder {
            issue_data,
            latest_stable,
        };
        builder.build(input)
    }
}
#[derive(Debug, Serialize)]
pub struct Item {
    pub title: String,
    // Originating RFC, if any.
    pub rfc: Option<Rfc>,
    // Tracking issue on the rustc repository.
    pub tracking: Option<Issue>,
    // GitHub label whose open issues are collected into `issues`.
    pub issue_label: Option<String>,
    pub issues: Vec<Issue>,
    pub stabilized: Option<Stabilization>,
    // RFC covering an unresolved question — presumably; confirm with input schema.
    pub unresolved: Option<Rfc>,
    pub link: Option<Link>,
    // Nested sub-items, converted recursively by the builder.
    pub deps: Vec<Item>,
}
#[derive(Debug, Serialize)]
pub struct Rfc {
    issue: Issue,
    // Links to the rendered RFC text when merged, or to its PR when not.
    url: String,
    merged: bool,
}
#[derive(Debug, Serialize)]
pub struct Stabilization {
    // Whether the stabilizing release is already stable, in beta, or nightly.
    pub state: VersionState,
    // Release version as "major.minor" (no patch) — ".0" is appended when parsed.
    pub version: String,
    // The stabilization pull request.
    pub pr: Issue,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum VersionState {
    // Serialized as "stable" / "beta" / "nightly" respectively.
    Stable,
    Beta,
    Nightly,
}
/// Lookup context carried through the input → output conversion.
struct Builder<'a> {
    issue_data: &'a IssueData,
    latest_stable: &'a Version,
}
impl Builder<'_> {
    /// Converts the whole parsed input map into output items, section by section.
    fn build(&self, input: InputData) -> OutputData {
        let result = input
            .0
            .into_iter()
            .map(|(key, items)| (key, self.convert_items(items)))
            .collect();
        OutputData(result)
    }
    /// Converts a list of input items, preserving their order.
    fn convert_items(&self, items: Vec<InputItem>) -> Vec<Item> {
        items
            .into_iter()
            .map(|item| self.convert_item(item))
            .collect()
    }
    /// Resolves one input item: looks up its RFC, tracking issue, labelled
    /// issues, and stabilization state, then recurses into `deps`.
    fn convert_item(&self, item: InputItem) -> Item {
        Item {
            title: item.title,
            rfc: self.convert_rfc(item.rfc),
            tracking: self.get_optional_issue(&*RUSTC_REPO, item.tracking),
            // Collect every prefetched issue carrying this item's label;
            // panics if the label was not prefetched into `issue_data`.
            issues: item
                .issue_label
                .as_ref()
                .map(|label| {
                    self.issue_data
                        // TODO Don't clone?
                        .labels[&(RUSTC_REPO.clone(), label.clone())]
                        .iter()
                        .map(|id| self.get_issue(&*RUSTC_REPO, *id))
                        .collect()
                })
                .unwrap_or_default(),
            issue_label: item.issue_label,
            stabilized: item.stabilized.map(|stabilized| Stabilization {
                state: self.get_version_state(&stabilized.version),
                version: stabilized.version,
                pr: self.get_issue(&*RUSTC_REPO, stabilized.pr),
            }),
            unresolved: self.convert_rfc(item.unresolved),
            link: item.link,
            deps: self.convert_items(item.deps),
        }
    }
    /// Classifies a "major.minor" stabilization version as stable, beta, or
    /// nightly relative to `latest_stable`.  Panics on an unparsable version.
    fn get_version_state(&self, version: &str) -> VersionState {
        // Input stores "major.minor"; append ".0" to form a full semver triple.
        let version =
            Version::parse(&format!("{}.0", version)).expect("invalid stabilization version");
        if *self.latest_stable >= version {
            return VersionState::Stable;
        }
        // The minor after the latest stable release is the current beta.
        let mut beta = self.latest_stable.clone();
        beta.increment_minor();
        if beta >= version {
            return VersionState::Beta;
        }
        VersionState::Nightly
    }
    /// Parses an RFC reference.  Two forms are handled (inferred from the
    /// branches): a bare PR number ("1234") for an unmerged RFC, or
    /// "1234-slug[#fragment]" for a merged one, which links to the rendered
    /// text instead of the pull request.  Panics on a non-numeric number part.
    fn convert_rfc(&self, rfc: Option<String>) -> Option<Rfc> {
        let rfc = rfc?;
        let dash = rfc.find('-');
        // The digits before the first '-' (or the whole string) are the number.
        let number = rfc[..dash.unwrap_or_else(|| rfc.len())]
            .parse()
            .expect("unexpected rfc number");
        let (url, merged) = if dash.is_none() {
            (
                format!("https://github.com/rust-lang/rfcs/pull/{}", rfc),
                false,
            )
        } else {
            // An optional '#fragment' deep-links into the rendered RFC page.
            let hash = rfc.find('#').unwrap_or_else(|| rfc.len());
            let (page, frag) = rfc.split_at(hash);
            (
                format!("https://rust-lang.github.io/rfcs/{}.html{}", page, frag),
                true,
            )
        };
        let issue = self.get_issue(&*RFC_REPO, number);
        Some(Rfc { issue, url, merged })
    }
    fn get_optional_issue(&self, repo: &Repo, id: Option<IssueId>) -> Option<Issue> {
        id.map(|id| self.get_issue(repo, id))
    }
    /// Returns a clone of a prefetched issue; panics if it was not prefetched.
    fn get_issue(&self, repo: &Repo, id: IssueId) -> Issue {
        // TODO Don't clone?
        self.issue_data.issues[&(repo.clone(), id)].clone()
    }
}
| 29.168831 | 98 | 0.54163 |
64d0d4777802cd46757a047506c68b98b56e4579 | 2,482 | #[doc = r" Value read from the register"]
pub struct R {
    // Snapshot of the register's bits captured by `read()`.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Bits staged for the next write to the register.
    bits: u32,
}
impl super::PIR0 {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the closure sees the current value through `R`
        // and stages changes through a `W` pre-seeded with that same value.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value, so any
        // field the closure does not set is written as its reset default.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the field"]
pub struct INSELR {
    // Extracted 5-bit INSEL field value (bits 0:4 of the register).
    bits: u8,
}
impl INSELR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r" Proxy"]
pub struct _INSELW<'a> {
    // Borrowed writer this proxy stages its field update into.
    w: &'a mut W,
}
impl<'a> _INSELW<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0..=4 (mask 0b1_1111 at offset 0).
        const MASK: u8 = 31;
        const OFFSET: u8 = 0;
        // Clear the field, then OR in the masked new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:4 - Interval Select"]
    #[inline]
    pub fn insel(&self) -> INSELR {
        // Extract the 5-bit INSEL field from the bottom of the register.
        let bits = {
            const MASK: u8 = 31;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u8
        };
        INSELR { bits }
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Marked unsafe by the generator: raw writes bypass per-field masking.
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:4 - Interval Select"]
    #[inline]
    pub fn insel(&mut self) -> _INSELW {
        _INSELW { w: self }
    }
}
| 23.415094 | 59 | 0.491136 |
1ae0581262f5612b65d2c3f3a71cf4f885fc5b7e | 348,776 | /* automatically generated by rust-bindgen 0.59.1 */
pub type va_list = __builtin_va_list;
pub type __dev_t = libc::c_ulong;
pub type __uid_t = libc::c_uint;
pub type __gid_t = libc::c_uint;
pub type __ino_t = libc::c_ulong;
pub type __mode_t = libc::c_uint;
pub type __nlink_t = libc::c_ulong;
pub type __off_t = libc::c_long;
pub type __off64_t = libc::c_long;
pub type __time_t = libc::c_long;
pub type __blksize_t = libc::c_long;
pub type __blkcnt_t = libc::c_long;
pub type __syscall_slong_t = libc::c_long;
pub type FILE = _IO_FILE;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _IO_marker {
_unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _IO_codecvt {
_unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _IO_wide_data {
_unused: [u8; 0],
}
pub type _IO_lock_t = libc::c_void;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _IO_FILE {
pub _flags: libc::c_int,
pub _IO_read_ptr: *mut libc::c_char,
pub _IO_read_end: *mut libc::c_char,
pub _IO_read_base: *mut libc::c_char,
pub _IO_write_base: *mut libc::c_char,
pub _IO_write_ptr: *mut libc::c_char,
pub _IO_write_end: *mut libc::c_char,
pub _IO_buf_base: *mut libc::c_char,
pub _IO_buf_end: *mut libc::c_char,
pub _IO_save_base: *mut libc::c_char,
pub _IO_backup_base: *mut libc::c_char,
pub _IO_save_end: *mut libc::c_char,
pub _markers: *mut _IO_marker,
pub _chain: *mut _IO_FILE,
pub _fileno: libc::c_int,
pub _flags2: libc::c_int,
pub _old_offset: __off_t,
pub _cur_column: libc::c_ushort,
pub _vtable_offset: libc::c_schar,
pub _shortbuf: [libc::c_char; 1usize],
pub _lock: *mut _IO_lock_t,
pub _offset: __off64_t,
pub _codecvt: *mut _IO_codecvt,
pub _wide_data: *mut _IO_wide_data,
pub _freeres_list: *mut _IO_FILE,
pub _freeres_buf: *mut libc::c_void,
pub __pad5: usize,
pub _mode: libc::c_int,
pub _unused2: [libc::c_char; 20usize],
}
#[test]
fn bindgen_test_layout__IO_FILE() {
assert_eq!(
::std::mem::size_of::<_IO_FILE>(),
216usize,
concat!("Size of: ", stringify!(_IO_FILE))
);
assert_eq!(
::std::mem::align_of::<_IO_FILE>(),
8usize,
concat!("Alignment of ", stringify!(_IO_FILE))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._flags as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_flags)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_read_ptr as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_read_ptr)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_read_end as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_read_end)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_read_base as *const _ as usize },
24usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_read_base)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_write_base as *const _ as usize },
32usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_write_base)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_write_ptr as *const _ as usize },
40usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_write_ptr)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_write_end as *const _ as usize },
48usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_write_end)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_buf_base as *const _ as usize },
56usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_buf_base)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_buf_end as *const _ as usize },
64usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_buf_end)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_save_base as *const _ as usize },
72usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_save_base)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_backup_base as *const _ as usize },
80usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_backup_base)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._IO_save_end as *const _ as usize },
88usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_IO_save_end)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._markers as *const _ as usize },
96usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_markers)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._chain as *const _ as usize },
104usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_chain)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._fileno as *const _ as usize },
112usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_fileno)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._flags2 as *const _ as usize },
116usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_flags2)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._old_offset as *const _ as usize },
120usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_old_offset)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._cur_column as *const _ as usize },
128usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_cur_column)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._vtable_offset as *const _ as usize },
130usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_vtable_offset)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._shortbuf as *const _ as usize },
131usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_shortbuf)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._lock as *const _ as usize },
136usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_lock)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._offset as *const _ as usize },
144usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_offset)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._codecvt as *const _ as usize },
152usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_codecvt)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._wide_data as *const _ as usize },
160usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_wide_data)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._freeres_list as *const _ as usize },
168usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_freeres_list)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._freeres_buf as *const _ as usize },
176usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_freeres_buf)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>())).__pad5 as *const _ as usize },
184usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(__pad5)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._mode as *const _ as usize },
192usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_mode)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<_IO_FILE>()))._unused2 as *const _ as usize },
196usize,
concat!(
"Offset of field: ",
stringify!(_IO_FILE),
"::",
stringify!(_unused2)
)
);
}
pub type wchar_t = libc::c_int;
pub type time_t = __time_t;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct timespec {
    // Whole seconds.
    pub tv_sec: __time_t,
    // Nanosecond remainder.
    pub tv_nsec: __syscall_slong_t,
}
#[test]
fn bindgen_test_layout_timespec() {
assert_eq!(
::std::mem::size_of::<timespec>(),
16usize,
concat!("Size of: ", stringify!(timespec))
);
assert_eq!(
::std::mem::align_of::<timespec>(),
8usize,
concat!("Alignment of ", stringify!(timespec))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<timespec>())).tv_sec as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(timespec),
"::",
stringify!(tv_sec)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<timespec>())).tv_nsec as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(timespec),
"::",
stringify!(tv_nsec)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct tm {
pub tm_sec: libc::c_int,
pub tm_min: libc::c_int,
pub tm_hour: libc::c_int,
pub tm_mday: libc::c_int,
pub tm_mon: libc::c_int,
pub tm_year: libc::c_int,
pub tm_wday: libc::c_int,
pub tm_yday: libc::c_int,
pub tm_isdst: libc::c_int,
pub tm_gmtoff: libc::c_long,
pub tm_zone: *const libc::c_char,
}
#[test]
fn bindgen_test_layout_tm() {
assert_eq!(
::std::mem::size_of::<tm>(),
56usize,
concat!("Size of: ", stringify!(tm))
);
assert_eq!(
::std::mem::align_of::<tm>(),
8usize,
concat!("Alignment of ", stringify!(tm))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_sec as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_sec)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_min as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_min)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_hour as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_hour)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_mday as *const _ as usize },
12usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_mday)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_mon as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_mon)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_year as *const _ as usize },
20usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_year)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_wday as *const _ as usize },
24usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_wday)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_yday as *const _ as usize },
28usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_yday)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_isdst as *const _ as usize },
32usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_isdst)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_gmtoff as *const _ as usize },
40usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_gmtoff)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<tm>())).tm_zone as *const _ as usize },
48usize,
concat!(
"Offset of field: ",
stringify!(tm),
"::",
stringify!(tm_zone)
)
);
}
pub type GInt32 = libc::c_int;
pub type GUInt32 = libc::c_uint;
pub type GInt16 = libc::c_short;
pub type GByte = libc::c_uchar;
pub type GIntBig = libc::c_longlong;
pub type GUIntBig = libc::c_ulonglong;
pub type GInt64 = GIntBig;
pub type GUInt64 = GUIntBig;
pub type GPtrDiff_t = GIntBig;
pub type CSLConstList = *mut *mut libc::c_char;
extern "C" {
pub fn CPLAtomicAdd(ptr: *mut libc::c_int, increment: libc::c_int) -> libc::c_int;
}
extern "C" {
pub fn CPLAtomicCompareAndExchange(
ptr: *mut libc::c_int,
oldval: libc::c_int,
newval: libc::c_int,
) -> libc::c_int;
}
pub type GDALProgressFunc = ::std::option::Option<
unsafe extern "C" fn(
dfComplete: f64,
pszMessage: *const libc::c_char,
pProgressArg: *mut libc::c_void,
) -> libc::c_int,
>;
extern "C" {
pub fn GDALDummyProgress(
arg1: f64,
arg2: *const libc::c_char,
arg3: *mut libc::c_void,
) -> libc::c_int;
}
extern "C" {
pub fn GDALTermProgress(
arg1: f64,
arg2: *const libc::c_char,
arg3: *mut libc::c_void,
) -> libc::c_int;
}
extern "C" {
pub fn GDALScaledProgress(
arg1: f64,
arg2: *const libc::c_char,
arg3: *mut libc::c_void,
) -> libc::c_int;
}
extern "C" {
pub fn GDALCreateScaledProgress(
arg1: f64,
arg2: f64,
arg3: GDALProgressFunc,
arg4: *mut libc::c_void,
) -> *mut libc::c_void;
}
extern "C" {
pub fn GDALDestroyScaledProgress(arg1: *mut libc::c_void);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct stat {
pub st_dev: __dev_t,
pub st_ino: __ino_t,
pub st_nlink: __nlink_t,
pub st_mode: __mode_t,
pub st_uid: __uid_t,
pub st_gid: __gid_t,
pub __pad0: libc::c_int,
pub st_rdev: __dev_t,
pub st_size: __off_t,
pub st_blksize: __blksize_t,
pub st_blocks: __blkcnt_t,
pub st_atim: timespec,
pub st_mtim: timespec,
pub st_ctim: timespec,
pub __glibc_reserved: [__syscall_slong_t; 3usize],
}
#[test]
fn bindgen_test_layout_stat() {
assert_eq!(
::std::mem::size_of::<stat>(),
144usize,
concat!("Size of: ", stringify!(stat))
);
assert_eq!(
::std::mem::align_of::<stat>(),
8usize,
concat!("Alignment of ", stringify!(stat))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_dev as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_dev)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_ino as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_ino)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_nlink as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_nlink)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_mode as *const _ as usize },
24usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_mode)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_uid as *const _ as usize },
28usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_uid)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_gid as *const _ as usize },
32usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_gid)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).__pad0 as *const _ as usize },
36usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(__pad0)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_rdev as *const _ as usize },
40usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_rdev)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_size as *const _ as usize },
48usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_size)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_blksize as *const _ as usize },
56usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_blksize)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_blocks as *const _ as usize },
64usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_blocks)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_atim as *const _ as usize },
72usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_atim)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_mtim as *const _ as usize },
88usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_mtim)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).st_ctim as *const _ as usize },
104usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(st_ctim)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<stat>())).__glibc_reserved as *const _ as usize },
120usize,
concat!(
"Offset of field: ",
stringify!(stat),
"::",
stringify!(__glibc_reserved)
)
);
}
extern "C" {
pub fn VSIFOpen(arg1: *const libc::c_char, arg2: *const libc::c_char) -> *mut FILE;
}
extern "C" {
pub fn VSIFClose(arg1: *mut FILE) -> libc::c_int;
}
extern "C" {
pub fn VSIFSeek(arg1: *mut FILE, arg2: libc::c_long, arg3: libc::c_int) -> libc::c_int;
}
extern "C" {
pub fn VSIFTell(arg1: *mut FILE) -> libc::c_long;
}
extern "C" {
pub fn VSIRewind(arg1: *mut FILE);
}
extern "C" {
pub fn VSIFFlush(arg1: *mut FILE);
}
extern "C" {
pub fn VSIFRead(arg1: *mut libc::c_void, arg2: usize, arg3: usize, arg4: *mut FILE) -> usize;
}
extern "C" {
pub fn VSIFWrite(arg1: *const libc::c_void, arg2: usize, arg3: usize, arg4: *mut FILE)
-> usize;
}
extern "C" {
pub fn VSIFGets(
arg1: *mut libc::c_char,
arg2: libc::c_int,
arg3: *mut FILE,
) -> *mut libc::c_char;
}
extern "C" {
pub fn VSIFPuts(arg1: *const libc::c_char, arg2: *mut FILE) -> libc::c_int;
}
extern "C" {
pub fn VSIFPrintf(arg1: *mut FILE, arg2: *const libc::c_char, ...) -> libc::c_int;
}
extern "C" {
pub fn VSIFGetc(arg1: *mut FILE) -> libc::c_int;
}
extern "C" {
pub fn VSIFPutc(arg1: libc::c_int, arg2: *mut FILE) -> libc::c_int;
}
extern "C" {
pub fn VSIUngetc(arg1: libc::c_int, arg2: *mut FILE) -> libc::c_int;
}
extern "C" {
pub fn VSIFEof(arg1: *mut FILE) -> libc::c_int;
}
pub type VSIStatBuf = stat;
extern "C" {
pub fn VSIStat(arg1: *const libc::c_char, arg2: *mut VSIStatBuf) -> libc::c_int;
}
pub type vsi_l_offset = GUIntBig;
pub type VSILFILE = FILE;
extern "C" {
pub fn VSIFOpenL(arg1: *const libc::c_char, arg2: *const libc::c_char) -> *mut VSILFILE;
}
extern "C" {
pub fn VSIFOpenExL(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
arg3: libc::c_int,
) -> *mut VSILFILE;
}
extern "C" {
pub fn VSIFOpenEx2L(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
arg3: libc::c_int,
arg4: CSLConstList,
) -> *mut VSILFILE;
}
extern "C" {
pub fn VSIFCloseL(arg1: *mut VSILFILE) -> libc::c_int;
}
extern "C" {
pub fn VSIFSeekL(arg1: *mut VSILFILE, arg2: vsi_l_offset, arg3: libc::c_int) -> libc::c_int;
}
extern "C" {
pub fn VSIFTellL(arg1: *mut VSILFILE) -> vsi_l_offset;
}
extern "C" {
pub fn VSIRewindL(arg1: *mut VSILFILE);
}
extern "C" {
pub fn VSIFReadL(
arg1: *mut libc::c_void,
arg2: usize,
arg3: usize,
arg4: *mut VSILFILE,
) -> usize;
}
extern "C" {
pub fn VSIFReadMultiRangeL(
nRanges: libc::c_int,
ppData: *mut *mut libc::c_void,
panOffsets: *const vsi_l_offset,
panSizes: *const usize,
arg1: *mut VSILFILE,
) -> libc::c_int;
}
extern "C" {
pub fn VSIFWriteL(
arg1: *const libc::c_void,
arg2: usize,
arg3: usize,
arg4: *mut VSILFILE,
) -> usize;
}
extern "C" {
pub fn VSIFEofL(arg1: *mut VSILFILE) -> libc::c_int;
}
extern "C" {
pub fn VSIFTruncateL(arg1: *mut VSILFILE, arg2: vsi_l_offset) -> libc::c_int;
}
extern "C" {
pub fn VSIFFlushL(arg1: *mut VSILFILE) -> libc::c_int;
}
extern "C" {
pub fn VSIFPrintfL(arg1: *mut VSILFILE, arg2: *const libc::c_char, ...) -> libc::c_int;
}
extern "C" {
pub fn VSIFPutcL(arg1: libc::c_int, arg2: *mut VSILFILE) -> libc::c_int;
}
pub mod VSIRangeStatus {
    // Constified enum module (bindgen style): status of a byte range as
    // returned by VSIFGetRangeStatusL for possibly-sparse files.
    pub type Type = libc::c_uint;
    pub const VSI_RANGE_STATUS_UNKNOWN: Type = 0;
    pub const VSI_RANGE_STATUS_DATA: Type = 1;
    pub const VSI_RANGE_STATUS_HOLE: Type = 2;
}
extern "C" {
pub fn VSIFGetRangeStatusL(
fp: *mut VSILFILE,
nStart: vsi_l_offset,
nLength: vsi_l_offset,
) -> VSIRangeStatus::Type;
}
extern "C" {
pub fn VSIIngestFile(
fp: *mut VSILFILE,
pszFilename: *const libc::c_char,
ppabyRet: *mut *mut GByte,
pnSize: *mut vsi_l_offset,
nMaxSize: GIntBig,
) -> libc::c_int;
}
extern "C" {
pub fn VSIOverwriteFile(
fpTarget: *mut VSILFILE,
pszSourceFilename: *const libc::c_char,
) -> libc::c_int;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct stat64 {
_unused: [u8; 0],
}
pub type VSIStatBufL = stat64;
extern "C" {
pub fn VSIStatL(arg1: *const libc::c_char, arg2: *mut VSIStatBufL) -> libc::c_int;
}
extern "C" {
pub fn VSIStatExL(
pszFilename: *const libc::c_char,
psStatBuf: *mut VSIStatBufL,
nFlags: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn VSIIsCaseSensitiveFS(pszFilename: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSISupportsSparseFiles(pszPath: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSIHasOptimizedReadMultiRange(pszPath: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSIGetActualURL(pszFilename: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn VSIGetSignedURL(
pszFilename: *const libc::c_char,
papszOptions: CSLConstList,
) -> *mut libc::c_char;
}
extern "C" {
pub fn VSIGetFileSystemOptions(pszFilename: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn VSIGetFileSystemsPrefixes() -> *mut *mut libc::c_char;
}
extern "C" {
pub fn VSIFGetNativeFileDescriptorL(arg1: *mut VSILFILE) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIGetFileMetadata(
pszFilename: *const libc::c_char,
pszDomain: *const libc::c_char,
papszOptions: CSLConstList,
) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn VSISetFileMetadata(
pszFilename: *const libc::c_char,
papszMetadata: CSLConstList,
pszDomain: *const libc::c_char,
papszOptions: CSLConstList,
) -> libc::c_int;
}
extern "C" {
pub fn VSICalloc(arg1: usize, arg2: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIMalloc(arg1: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIFree(arg1: *mut libc::c_void);
}
extern "C" {
pub fn VSIRealloc(arg1: *mut libc::c_void, arg2: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIStrdup(arg1: *const libc::c_char) -> *mut libc::c_char;
}
extern "C" {
pub fn VSIMallocAligned(nAlignment: usize, nSize: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIMallocAlignedAuto(nSize: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIFreeAligned(ptr: *mut libc::c_void);
}
extern "C" {
pub fn VSIMallocAlignedAutoVerbose(
nSize: usize,
pszFile: *const libc::c_char,
nLine: libc::c_int,
) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIMalloc2(nSize1: usize, nSize2: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIMalloc3(nSize1: usize, nSize2: usize, nSize3: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIMallocVerbose(
nSize: usize,
pszFile: *const libc::c_char,
nLine: libc::c_int,
) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIMalloc2Verbose(
nSize1: usize,
nSize2: usize,
pszFile: *const libc::c_char,
nLine: libc::c_int,
) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIMalloc3Verbose(
nSize1: usize,
nSize2: usize,
nSize3: usize,
pszFile: *const libc::c_char,
nLine: libc::c_int,
) -> *mut libc::c_void;
}
extern "C" {
pub fn VSICallocVerbose(
nCount: usize,
nSize: usize,
pszFile: *const libc::c_char,
nLine: libc::c_int,
) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIReallocVerbose(
pOldPtr: *mut libc::c_void,
nNewSize: usize,
pszFile: *const libc::c_char,
nLine: libc::c_int,
) -> *mut libc::c_void;
}
extern "C" {
pub fn VSIStrdupVerbose(
pszStr: *const libc::c_char,
pszFile: *const libc::c_char,
nLine: libc::c_int,
) -> *mut libc::c_char;
}
extern "C" {
pub fn CPLGetPhysicalRAM() -> GIntBig;
}
extern "C" {
pub fn CPLGetUsablePhysicalRAM() -> GIntBig;
}
extern "C" {
pub fn VSIReadDir(arg1: *const libc::c_char) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn VSIReadDirRecursive(pszPath: *const libc::c_char) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn VSIReadDirEx(
pszPath: *const libc::c_char,
nMaxFiles: libc::c_int,
) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn VSISiblingFiles(pszPath: *const libc::c_char) -> *mut *mut libc::c_char;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct VSIDIR {
_unused: [u8; 0],
}
extern "C" {
pub fn VSIOpenDir(
pszPath: *const libc::c_char,
nRecurseDepth: libc::c_int,
papszOptions: *const *const libc::c_char,
) -> *mut VSIDIR;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct VSIDIREntry {
    // Entry name; ownership/encoding per GDAL's C API — see its docs.
    pub pszName: *mut libc::c_char,
    pub nMode: libc::c_int,
    pub nSize: vsi_l_offset,
    pub nMTime: GIntBig,
    // The b*Known flags record whether the corresponding field above was
    // actually filled in by the filesystem handler (nonzero = known).
    pub bModeKnown: libc::c_char,
    pub bSizeKnown: libc::c_char,
    pub bMTimeKnown: libc::c_char,
    pub papszExtra: *mut *mut libc::c_char,
}
#[test]
fn bindgen_test_layout_VSIDIREntry() {
    // Layout check: the Rust struct must match the C ABI that bindgen
    // generated it from — total size, alignment, and every field offset.
    assert_eq!(
        ::std::mem::size_of::<VSIDIREntry>(),
        48usize,
        concat!("Size of: ", stringify!(VSIDIREntry))
    );
    assert_eq!(
        ::std::mem::align_of::<VSIDIREntry>(),
        8usize,
        concat!("Alignment of ", stringify!(VSIDIREntry))
    );
    // Offsets are taken from a null base pointer; the place expression is
    // never read, only its address is converted to usize.
    macro_rules! check_offset {
        ($field:ident, $expected:expr) => {
            assert_eq!(
                unsafe { &(*(::std::ptr::null::<VSIDIREntry>())).$field as *const _ as usize },
                $expected,
                concat!(
                    "Offset of field: ",
                    stringify!(VSIDIREntry),
                    "::",
                    stringify!($field)
                )
            );
        };
    }
    check_offset!(pszName, 0usize);
    check_offset!(nMode, 8usize);
    check_offset!(nSize, 16usize);
    check_offset!(nMTime, 24usize);
    check_offset!(bModeKnown, 32usize);
    check_offset!(bSizeKnown, 33usize);
    check_offset!(bMTimeKnown, 34usize);
    check_offset!(papszExtra, 40usize);
}
extern "C" {
pub fn VSIGetNextDirEntry(dir: *mut VSIDIR) -> *const VSIDIREntry;
}
extern "C" {
pub fn VSICloseDir(dir: *mut VSIDIR);
}
extern "C" {
pub fn VSIMkdir(pszPathname: *const libc::c_char, mode: libc::c_long) -> libc::c_int;
}
extern "C" {
pub fn VSIMkdirRecursive(pszPathname: *const libc::c_char, mode: libc::c_long) -> libc::c_int;
}
extern "C" {
pub fn VSIRmdir(pszDirname: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSIRmdirRecursive(pszDirname: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSIUnlink(pszFilename: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSIUnlinkBatch(papszFiles: CSLConstList) -> *mut libc::c_int;
}
extern "C" {
pub fn VSIRename(oldpath: *const libc::c_char, newpath: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSISync(
pszSource: *const libc::c_char,
pszTarget: *const libc::c_char,
papszOptions: *const *const libc::c_char,
pProgressFunc: GDALProgressFunc,
pProgressData: *mut libc::c_void,
ppapszOutputs: *mut *mut *mut libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn VSIAbortPendingUploads(pszFilename: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn VSIStrerror(arg1: libc::c_int) -> *mut libc::c_char;
}
extern "C" {
pub fn VSIGetDiskFreeSpace(pszDirname: *const libc::c_char) -> GIntBig;
}
extern "C" {
pub fn VSINetworkStatsReset();
}
extern "C" {
pub fn VSINetworkStatsGetAsSerializedJSON(
papszOptions: *mut *mut libc::c_char,
) -> *mut libc::c_char;
}
extern "C" {
pub fn VSIInstallMemFileHandler();
}
extern "C" {
pub fn VSIInstallLargeFileHandler();
}
extern "C" {
pub fn VSIInstallSubFileHandler();
}
extern "C" {
pub fn VSIInstallCurlFileHandler();
}
extern "C" {
pub fn VSICurlClearCache();
}
extern "C" {
pub fn VSICurlPartialClearCache(pszFilenamePrefix: *const libc::c_char);
}
extern "C" {
pub fn VSIInstallCurlStreamingFileHandler();
}
extern "C" {
pub fn VSIInstallS3FileHandler();
}
extern "C" {
pub fn VSIInstallS3StreamingFileHandler();
}
extern "C" {
pub fn VSIInstallGSFileHandler();
}
extern "C" {
pub fn VSIInstallGSStreamingFileHandler();
}
extern "C" {
pub fn VSIInstallAzureFileHandler();
}
extern "C" {
pub fn VSIInstallAzureStreamingFileHandler();
}
extern "C" {
pub fn VSIInstallADLSFileHandler();
}
extern "C" {
pub fn VSIInstallOSSFileHandler();
}
extern "C" {
pub fn VSIInstallOSSStreamingFileHandler();
}
extern "C" {
pub fn VSIInstallSwiftFileHandler();
}
extern "C" {
pub fn VSIInstallSwiftStreamingFileHandler();
}
extern "C" {
pub fn VSIInstallGZipFileHandler();
}
extern "C" {
pub fn VSIInstallZipFileHandler();
}
extern "C" {
pub fn VSIInstallStdinHandler();
}
extern "C" {
pub fn VSIInstallHdfsHandler();
}
extern "C" {
pub fn VSIInstallWebHdfsHandler();
}
extern "C" {
pub fn VSIInstallStdoutHandler();
}
extern "C" {
pub fn VSIInstallSparseFileHandler();
}
extern "C" {
pub fn VSIInstallTarFileHandler();
}
extern "C" {
pub fn VSIInstallCryptFileHandler();
}
extern "C" {
pub fn VSISetCryptKey(pabyKey: *const GByte, nKeySize: libc::c_int);
}
extern "C" {
pub fn VSICleanupFileManager();
}
extern "C" {
pub fn VSIFileFromMemBuffer(
pszFilename: *const libc::c_char,
pabyData: *mut GByte,
nDataLength: vsi_l_offset,
bTakeOwnership: libc::c_int,
) -> *mut VSILFILE;
}
extern "C" {
pub fn VSIGetMemFileBuffer(
pszFilename: *const libc::c_char,
pnDataLength: *mut vsi_l_offset,
bUnlinkAndSeize: libc::c_int,
) -> *mut GByte;
}
// fwrite-style write callback (ptr, size, nmemb, stream) used to redirect
// VSI stdout output; Option-wrapped so None maps to a NULL C function pointer.
pub type VSIWriteFunction = ::std::option::Option<
    unsafe extern "C" fn(
        ptr: *const libc::c_void,
        size: usize,
        nmemb: usize,
        stream: *mut FILE,
    ) -> usize,
>;
extern "C" {
    pub fn VSIStdoutSetRedirection(pFct: VSIWriteFunction, stream: *mut FILE);
}
// ---------------------------------------------------------------------------
// Callback signatures for the VSI filesystem-plugin API.  They are collected
// in `VSIFilesystemPluginCallbacksStruct` and registered through
// `VSIInstallPluginHandler()`.  Filesystem-level callbacks take the plugin's
// `pUserData` pointer first; per-file callbacks take the `pFile` handle
// returned by the open callback.  All are Option-wrapped (None == NULL).
// ---------------------------------------------------------------------------
pub type VSIFilesystemPluginStatCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        pszFilename: *const libc::c_char,
        pStatBuf: *mut VSIStatBufL,
        nFlags: libc::c_int,
    ) -> libc::c_int,
>;
pub type VSIFilesystemPluginUnlinkCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        pszFilename: *const libc::c_char,
    ) -> libc::c_int,
>;
pub type VSIFilesystemPluginRenameCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        oldpath: *const libc::c_char,
        newpath: *const libc::c_char,
    ) -> libc::c_int,
>;
pub type VSIFilesystemPluginMkdirCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        pszDirname: *const libc::c_char,
        nMode: libc::c_long,
    ) -> libc::c_int,
>;
pub type VSIFilesystemPluginRmdirCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        pszDirname: *const libc::c_char,
    ) -> libc::c_int,
>;
// Returns a NULL-terminated string list, like VSIReadDirEx().
pub type VSIFilesystemPluginReadDirCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        pszDirname: *const libc::c_char,
        nMaxFiles: libc::c_int,
    ) -> *mut *mut libc::c_char,
>;
pub type VSIFilesystemPluginSiblingFilesCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        pszDirname: *const libc::c_char,
    ) -> *mut *mut libc::c_char,
>;
// The returned opaque pointer becomes the `pFile` argument of the
// per-file callbacks below.
pub type VSIFilesystemPluginOpenCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pUserData: *mut libc::c_void,
        pszFilename: *const libc::c_char,
        pszAccess: *const libc::c_char,
    ) -> *mut libc::c_void,
>;
pub type VSIFilesystemPluginTellCallback =
    ::std::option::Option<unsafe extern "C" fn(pFile: *mut libc::c_void) -> vsi_l_offset>;
pub type VSIFilesystemPluginSeekCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pFile: *mut libc::c_void,
        nOffset: vsi_l_offset,
        nWhence: libc::c_int,
    ) -> libc::c_int,
>;
// fread-style (buffer, size, count) -> items-read signature.
pub type VSIFilesystemPluginReadCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pFile: *mut libc::c_void,
        pBuffer: *mut libc::c_void,
        nSize: usize,
        nCount: usize,
    ) -> usize,
>;
pub type VSIFilesystemPluginReadMultiRangeCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pFile: *mut libc::c_void,
        nRanges: libc::c_int,
        ppData: *mut *mut libc::c_void,
        panOffsets: *const vsi_l_offset,
        panSizes: *const usize,
    ) -> libc::c_int,
>;
pub type VSIFilesystemPluginGetRangeStatusCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pFile: *mut libc::c_void,
        nOffset: vsi_l_offset,
        nLength: vsi_l_offset,
    ) -> VSIRangeStatus::Type,
>;
pub type VSIFilesystemPluginEofCallback =
    ::std::option::Option<unsafe extern "C" fn(pFile: *mut libc::c_void) -> libc::c_int>;
// fwrite-style (buffer, size, count) -> items-written signature.
pub type VSIFilesystemPluginWriteCallback = ::std::option::Option<
    unsafe extern "C" fn(
        pFile: *mut libc::c_void,
        pBuffer: *const libc::c_void,
        nSize: usize,
        nCount: usize,
    ) -> usize,
>;
pub type VSIFilesystemPluginFlushCallback =
    ::std::option::Option<unsafe extern "C" fn(pFile: *mut libc::c_void) -> libc::c_int>;
pub type VSIFilesystemPluginTruncateCallback = ::std::option::Option<
    unsafe extern "C" fn(pFile: *mut libc::c_void, nNewSize: vsi_l_offset) -> libc::c_int,
>;
pub type VSIFilesystemPluginCloseCallback =
    ::std::option::Option<unsafe extern "C" fn(pFile: *mut libc::c_void) -> libc::c_int>;
/// Callback table registered with `VSIInstallPluginHandler()`.
///
/// Allocate with `VSIAllocFilesystemPluginCallbacksStruct()` and release
/// with `VSIFreeFilesystemPluginCallbacksStruct()`.  Entries left as `None`
/// appear as NULL function pointers on the C side.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct VSIFilesystemPluginCallbacksStruct {
    // Opaque pointer passed as the first argument of the filesystem-level
    // callbacks (stat/unlink/rename/mkdir/rmdir/read_dir/open/sibling_files).
    pub pUserData: *mut libc::c_void,
    pub stat: VSIFilesystemPluginStatCallback,
    pub unlink: VSIFilesystemPluginUnlinkCallback,
    pub rename: VSIFilesystemPluginRenameCallback,
    pub mkdir: VSIFilesystemPluginMkdirCallback,
    pub rmdir: VSIFilesystemPluginRmdirCallback,
    pub read_dir: VSIFilesystemPluginReadDirCallback,
    pub open: VSIFilesystemPluginOpenCallback,
    pub tell: VSIFilesystemPluginTellCallback,
    pub seek: VSIFilesystemPluginSeekCallback,
    pub read: VSIFilesystemPluginReadCallback,
    pub read_multi_range: VSIFilesystemPluginReadMultiRangeCallback,
    pub get_range_status: VSIFilesystemPluginGetRangeStatusCallback,
    pub eof: VSIFilesystemPluginEofCallback,
    pub write: VSIFilesystemPluginWriteCallback,
    pub flush: VSIFilesystemPluginFlushCallback,
    pub truncate: VSIFilesystemPluginTruncateCallback,
    pub close: VSIFilesystemPluginCloseCallback,
    // Buffer/cache sizes; exact semantics not visible here — see GDAL docs.
    pub nBufferSize: usize,
    pub nCacheSize: usize,
    pub sibling_files: VSIFilesystemPluginSiblingFilesCallback,
}
#[test]
fn bindgen_test_layout_VSIFilesystemPluginCallbacksStruct() {
    // Layout check: size, alignment and every field offset must match the
    // C definition this binding was generated from.
    assert_eq!(
        ::std::mem::size_of::<VSIFilesystemPluginCallbacksStruct>(),
        168usize,
        concat!("Size of: ", stringify!(VSIFilesystemPluginCallbacksStruct))
    );
    assert_eq!(
        ::std::mem::align_of::<VSIFilesystemPluginCallbacksStruct>(),
        8usize,
        concat!(
            "Alignment of ",
            stringify!(VSIFilesystemPluginCallbacksStruct)
        )
    );
    // Offsets are taken from a null base pointer; the place expression is
    // never read, only its address is converted to usize.
    macro_rules! check_offset {
        ($field:ident, $expected:expr) => {
            assert_eq!(
                unsafe {
                    &(*(::std::ptr::null::<VSIFilesystemPluginCallbacksStruct>())).$field
                        as *const _ as usize
                },
                $expected,
                concat!(
                    "Offset of field: ",
                    stringify!(VSIFilesystemPluginCallbacksStruct),
                    "::",
                    stringify!($field)
                )
            );
        };
    }
    check_offset!(pUserData, 0usize);
    check_offset!(stat, 8usize);
    check_offset!(unlink, 16usize);
    check_offset!(rename, 24usize);
    check_offset!(mkdir, 32usize);
    check_offset!(rmdir, 40usize);
    check_offset!(read_dir, 48usize);
    check_offset!(open, 56usize);
    check_offset!(tell, 64usize);
    check_offset!(seek, 72usize);
    check_offset!(read, 80usize);
    check_offset!(read_multi_range, 88usize);
    check_offset!(get_range_status, 96usize);
    check_offset!(eof, 104usize);
    check_offset!(write, 112usize);
    check_offset!(flush, 120usize);
    check_offset!(truncate, 128usize);
    check_offset!(close, 136usize);
    check_offset!(nBufferSize, 144usize);
    check_offset!(nCacheSize, 152usize);
    check_offset!(sibling_files, 160usize);
}
extern "C" {
pub fn VSIAllocFilesystemPluginCallbacksStruct() -> *mut VSIFilesystemPluginCallbacksStruct;
}
extern "C" {
pub fn VSIFreeFilesystemPluginCallbacksStruct(poCb: *mut VSIFilesystemPluginCallbacksStruct);
}
extern "C" {
pub fn VSIInstallPluginHandler(
pszPrefix: *const libc::c_char,
poCb: *const VSIFilesystemPluginCallbacksStruct,
) -> libc::c_int;
}
extern "C" {
pub fn VSITime(arg1: *mut libc::c_ulong) -> libc::c_ulong;
}
extern "C" {
pub fn VSICTime(arg1: libc::c_ulong) -> *const libc::c_char;
}
extern "C" {
pub fn VSIGMTime(pnTime: *const time_t, poBrokenTime: *mut tm) -> *mut tm;
}
extern "C" {
pub fn VSILocalTime(pnTime: *const time_t, poBrokenTime: *mut tm) -> *mut tm;
}
/// CPL error severity levels — bindgen's "module" representation of the
/// C `CPLErr` enum (constants over a plain integer `Type`).
pub mod CPLErr {
    pub type Type = libc::c_uint;
    pub const CE_None: Type = 0;
    pub const CE_Debug: Type = 1;
    pub const CE_Warning: Type = 2;
    pub const CE_Failure: Type = 3;
    pub const CE_Fatal: Type = 4;
}
/// Numeric error code passed alongside a `CPLErr::Type` severity
/// (see `CPLError` / `CPLGetLastErrorNo`).
pub type CPLErrorNum = libc::c_int;
extern "C" {
pub fn CPLError(eErrClass: CPLErr::Type, err_no: CPLErrorNum, fmt: *const libc::c_char, ...);
}
extern "C" {
pub fn CPLErrorV(
arg1: CPLErr::Type,
arg2: CPLErrorNum,
arg3: *const libc::c_char,
arg4: *mut __va_list_tag,
);
}
extern "C" {
pub fn CPLEmergencyError(arg1: *const libc::c_char);
}
extern "C" {
pub fn CPLErrorReset();
}
extern "C" {
pub fn CPLGetLastErrorNo() -> CPLErrorNum;
}
extern "C" {
pub fn CPLGetLastErrorType() -> CPLErr::Type;
}
extern "C" {
pub fn CPLGetLastErrorMsg() -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetErrorCounter() -> GUInt32;
}
extern "C" {
pub fn CPLGetErrorHandlerUserData() -> *mut libc::c_void;
}
extern "C" {
pub fn CPLErrorSetState(
eErrClass: CPLErr::Type,
err_no: CPLErrorNum,
pszMsg: *const libc::c_char,
);
}
extern "C" {
pub fn CPLCleanupErrorMutex();
}
/// Error-handler callback: receives the severity class, the error number
/// and the message text.  Installed via `CPLSetErrorHandler` /
/// `CPLPushErrorHandler`; Option-wrapped so `None` maps to NULL.
pub type CPLErrorHandler = ::std::option::Option<
    unsafe extern "C" fn(arg1: CPLErr::Type, arg2: CPLErrorNum, arg3: *const libc::c_char),
>;
extern "C" {
pub fn CPLLoggingErrorHandler(arg1: CPLErr::Type, arg2: CPLErrorNum, arg3: *const libc::c_char);
}
extern "C" {
pub fn CPLDefaultErrorHandler(arg1: CPLErr::Type, arg2: CPLErrorNum, arg3: *const libc::c_char);
}
extern "C" {
pub fn CPLQuietErrorHandler(arg1: CPLErr::Type, arg2: CPLErrorNum, arg3: *const libc::c_char);
}
extern "C" {
pub fn CPLTurnFailureIntoWarning(bOn: libc::c_int);
}
extern "C" {
pub fn CPLSetErrorHandler(arg1: CPLErrorHandler) -> CPLErrorHandler;
}
extern "C" {
pub fn CPLSetErrorHandlerEx(arg1: CPLErrorHandler, arg2: *mut libc::c_void) -> CPLErrorHandler;
}
extern "C" {
pub fn CPLPushErrorHandler(arg1: CPLErrorHandler);
}
extern "C" {
pub fn CPLPushErrorHandlerEx(arg1: CPLErrorHandler, arg2: *mut libc::c_void);
}
extern "C" {
pub fn CPLSetCurrentErrorHandlerCatchDebug(bCatchDebug: libc::c_int);
}
extern "C" {
pub fn CPLPopErrorHandler();
}
extern "C" {
pub fn CPLDebug(arg1: *const libc::c_char, arg2: *const libc::c_char, ...);
}
extern "C" {
pub fn CPLVerifyConfiguration();
}
extern "C" {
pub fn CPLGetConfigOption(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetThreadLocalConfigOption(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLSetConfigOption(arg1: *const libc::c_char, arg2: *const libc::c_char);
}
extern "C" {
pub fn CPLSetThreadLocalConfigOption(
pszKey: *const libc::c_char,
pszValue: *const libc::c_char,
);
}
extern "C" {
pub fn CPLFreeConfig();
}
extern "C" {
pub fn CPLGetConfigOptions() -> *mut *mut libc::c_char;
}
extern "C" {
pub fn CPLSetConfigOptions(papszConfigOptions: *const *const libc::c_char);
}
extern "C" {
pub fn CPLGetThreadLocalConfigOptions() -> *mut *mut libc::c_char;
}
extern "C" {
pub fn CPLSetThreadLocalConfigOptions(papszConfigOptions: *const *const libc::c_char);
}
extern "C" {
pub fn CPLLoadConfigOptionsFromFile(
pszFilename: *const libc::c_char,
bOverrideEnvVars: libc::c_int,
);
}
extern "C" {
pub fn CPLLoadConfigOptionsFromPredefinedFiles();
}
extern "C" {
pub fn CPLMalloc(arg1: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLCalloc(arg1: usize, arg2: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLRealloc(arg1: *mut libc::c_void, arg2: usize) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLStrdup(arg1: *const libc::c_char) -> *mut libc::c_char;
}
extern "C" {
pub fn CPLStrlwr(arg1: *mut libc::c_char) -> *mut libc::c_char;
}
extern "C" {
pub fn CPLFGets(
arg1: *mut libc::c_char,
arg2: libc::c_int,
arg3: *mut FILE,
) -> *mut libc::c_char;
}
extern "C" {
pub fn CPLReadLine(arg1: *mut FILE) -> *const libc::c_char;
}
extern "C" {
pub fn CPLReadLineL(arg1: *mut VSILFILE) -> *const libc::c_char;
}
extern "C" {
pub fn CPLReadLine2L(
arg1: *mut VSILFILE,
arg2: libc::c_int,
arg3: CSLConstList,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLReadLine3L(
arg1: *mut VSILFILE,
arg2: libc::c_int,
arg3: *mut libc::c_int,
arg4: CSLConstList,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLAtof(arg1: *const libc::c_char) -> f64;
}
extern "C" {
pub fn CPLAtofDelim(arg1: *const libc::c_char, arg2: libc::c_char) -> f64;
}
extern "C" {
pub fn CPLStrtod(arg1: *const libc::c_char, arg2: *mut *mut libc::c_char) -> f64;
}
extern "C" {
pub fn CPLStrtodDelim(
arg1: *const libc::c_char,
arg2: *mut *mut libc::c_char,
arg3: libc::c_char,
) -> f64;
}
extern "C" {
pub fn CPLStrtof(arg1: *const libc::c_char, arg2: *mut *mut libc::c_char) -> f32;
}
extern "C" {
pub fn CPLStrtofDelim(
arg1: *const libc::c_char,
arg2: *mut *mut libc::c_char,
arg3: libc::c_char,
) -> f32;
}
extern "C" {
pub fn CPLAtofM(arg1: *const libc::c_char) -> f64;
}
extern "C" {
pub fn CPLScanString(
arg1: *const libc::c_char,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
) -> *mut libc::c_char;
}
extern "C" {
pub fn CPLScanDouble(arg1: *const libc::c_char, arg2: libc::c_int) -> f64;
}
extern "C" {
pub fn CPLScanLong(arg1: *const libc::c_char, arg2: libc::c_int) -> libc::c_long;
}
extern "C" {
pub fn CPLScanULong(arg1: *const libc::c_char, arg2: libc::c_int) -> libc::c_ulong;
}
extern "C" {
pub fn CPLScanUIntBig(arg1: *const libc::c_char, arg2: libc::c_int) -> GUIntBig;
}
extern "C" {
pub fn CPLAtoGIntBig(pszString: *const libc::c_char) -> GIntBig;
}
extern "C" {
pub fn CPLAtoGIntBigEx(
pszString: *const libc::c_char,
bWarn: libc::c_int,
pbOverflow: *mut libc::c_int,
) -> GIntBig;
}
extern "C" {
pub fn CPLScanPointer(arg1: *const libc::c_char, arg2: libc::c_int) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLPrintString(
arg1: *mut libc::c_char,
arg2: *const libc::c_char,
arg3: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn CPLPrintStringFill(
arg1: *mut libc::c_char,
arg2: *const libc::c_char,
arg3: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn CPLPrintInt32(arg1: *mut libc::c_char, arg2: GInt32, arg3: libc::c_int) -> libc::c_int;
}
extern "C" {
pub fn CPLPrintUIntBig(
arg1: *mut libc::c_char,
arg2: GUIntBig,
arg3: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn CPLPrintDouble(
arg1: *mut libc::c_char,
arg2: *const libc::c_char,
arg3: f64,
arg4: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn CPLPrintTime(
arg1: *mut libc::c_char,
arg2: libc::c_int,
arg3: *const libc::c_char,
arg4: *const tm,
arg5: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn CPLPrintPointer(
arg1: *mut libc::c_char,
arg2: *mut libc::c_void,
arg3: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn CPLGetSymbol(arg1: *const libc::c_char, arg2: *const libc::c_char) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLGetExecPath(pszPathBuf: *mut libc::c_char, nMaxLength: libc::c_int) -> libc::c_int;
}
extern "C" {
pub fn CPLGetPath(arg1: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetDirname(arg1: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetFilename(arg1: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetBasename(arg1: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetExtension(arg1: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetCurrentDir() -> *mut libc::c_char;
}
extern "C" {
pub fn CPLFormFilename(
pszPath: *const libc::c_char,
pszBasename: *const libc::c_char,
pszExtension: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLFormCIFilename(
pszPath: *const libc::c_char,
pszBasename: *const libc::c_char,
pszExtension: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLResetExtension(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLProjectRelativeFilename(
pszProjectDir: *const libc::c_char,
pszSecondaryFilename: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLIsFilenameRelative(pszFilename: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn CPLExtractRelativePath(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
arg3: *mut libc::c_int,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLCleanTrailingSlash(arg1: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLCorrespondingPaths(
pszOldFilename: *const libc::c_char,
pszNewFilename: *const libc::c_char,
papszFileList: *mut *mut libc::c_char,
) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn CPLCheckForFile(
pszFilename: *mut libc::c_char,
papszSiblingList: *mut *mut libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn CPLGenerateTempFilename(pszStem: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLExpandTilde(pszFilename: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
pub fn CPLGetHomeDir() -> *const libc::c_char;
}
extern "C" {
pub fn CPLLaunderForFilename(
pszName: *const libc::c_char,
pszOutputPath: *const libc::c_char,
) -> *const libc::c_char;
}
/// File-finder callback used with `CPLPushFileFinder` / `CPLFindFile`.
/// Takes two C strings (per `CPLFindFile`: a class name and a basename)
/// and returns a path string — presumably NULL when not found; confirm
/// against the GDAL `cpl_conv.h` documentation.
pub type CPLFileFinder = ::std::option::Option<
    unsafe extern "C" fn(
        arg1: *const libc::c_char,
        arg2: *const libc::c_char,
    ) -> *const libc::c_char,
>;
extern "C" {
pub fn CPLFindFile(
pszClass: *const libc::c_char,
pszBasename: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLDefaultFindFile(
pszClass: *const libc::c_char,
pszBasename: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLPushFileFinder(pfnFinder: CPLFileFinder);
}
extern "C" {
pub fn CPLPopFileFinder() -> CPLFileFinder;
}
extern "C" {
pub fn CPLPushFinderLocation(arg1: *const libc::c_char);
}
extern "C" {
pub fn CPLPopFinderLocation();
}
extern "C" {
pub fn CPLFinderClean();
}
extern "C" {
pub fn CPLStat(arg1: *const libc::c_char, arg2: *mut VSIStatBuf) -> libc::c_int;
}
/// Entry describing one file in the CPL shared-file pool
/// (see `CPLOpenShared` / `CPLCloseShared` / `CPLGetSharedList`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct CPLSharedFileInfo {
    pub fp: *mut FILE,
    // Presumably the number of CPLOpenShared() handles sharing `fp` — confirm.
    pub nRefCount: libc::c_int,
    // Flag; likely mirrors the bLarge argument of CPLOpenShared() — confirm.
    pub bLarge: libc::c_int,
    pub pszFilename: *mut libc::c_char,
    pub pszAccess: *mut libc::c_char,
}
#[test]
fn bindgen_test_layout_CPLSharedFileInfo() {
    // Layout check: size, alignment and every field offset must match the
    // C definition this binding was generated from.
    assert_eq!(
        ::std::mem::size_of::<CPLSharedFileInfo>(),
        32usize,
        concat!("Size of: ", stringify!(CPLSharedFileInfo))
    );
    assert_eq!(
        ::std::mem::align_of::<CPLSharedFileInfo>(),
        8usize,
        concat!("Alignment of ", stringify!(CPLSharedFileInfo))
    );
    // Offsets are taken from a null base pointer; the place expression is
    // never read, only its address is converted to usize.
    macro_rules! check_offset {
        ($field:ident, $expected:expr) => {
            assert_eq!(
                unsafe { &(*(::std::ptr::null::<CPLSharedFileInfo>())).$field as *const _ as usize },
                $expected,
                concat!(
                    "Offset of field: ",
                    stringify!(CPLSharedFileInfo),
                    "::",
                    stringify!($field)
                )
            );
        };
    }
    check_offset!(fp, 0usize);
    check_offset!(nRefCount, 8usize);
    check_offset!(bLarge, 12usize);
    check_offset!(pszFilename, 16usize);
    check_offset!(pszAccess, 24usize);
}
extern "C" {
pub fn CPLOpenShared(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
arg3: libc::c_int,
) -> *mut FILE;
}
extern "C" {
pub fn CPLCloseShared(arg1: *mut FILE);
}
extern "C" {
pub fn CPLGetSharedList(arg1: *mut libc::c_int) -> *mut CPLSharedFileInfo;
}
extern "C" {
pub fn CPLDumpSharedList(arg1: *mut FILE);
}
extern "C" {
pub fn CPLCleanupSharedFileMutex();
}
extern "C" {
pub fn CPLDMSToDec(is: *const libc::c_char) -> f64;
}
extern "C" {
pub fn CPLDecToDMS(
dfAngle: f64,
pszAxis: *const libc::c_char,
nPrecision: libc::c_int,
) -> *const libc::c_char;
}
extern "C" {
pub fn CPLPackedDMSToDec(arg1: f64) -> f64;
}
extern "C" {
pub fn CPLDecToPackedDMS(dfDec: f64) -> f64;
}
extern "C" {
pub fn CPLStringToComplex(pszString: *const libc::c_char, pdfReal: *mut f64, pdfImag: *mut f64);
}
extern "C" {
pub fn CPLUnlinkTree(arg1: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn CPLCopyFile(
pszNewPath: *const libc::c_char,
pszOldPath: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn CPLCopyTree(
pszNewPath: *const libc::c_char,
pszOldPath: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn CPLMoveFile(
pszNewPath: *const libc::c_char,
pszOldPath: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn CPLSymlink(
pszOldPath: *const libc::c_char,
pszNewPath: *const libc::c_char,
papszOptions: CSLConstList,
) -> libc::c_int;
}
extern "C" {
pub fn CPLCreateZip(
pszZipFilename: *const libc::c_char,
papszOptions: *mut *mut libc::c_char,
) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLCreateFileInZip(
hZip: *mut libc::c_void,
pszFilename: *const libc::c_char,
papszOptions: *mut *mut libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn CPLWriteFileInZip(
hZip: *mut libc::c_void,
pBuffer: *const libc::c_void,
nBufferSize: libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn CPLCloseFileInZip(hZip: *mut libc::c_void) -> CPLErr::Type;
}
extern "C" {
pub fn CPLCloseZip(hZip: *mut libc::c_void) -> CPLErr::Type;
}
extern "C" {
pub fn CPLZLibDeflate(
ptr: *const libc::c_void,
nBytes: usize,
nLevel: libc::c_int,
outptr: *mut libc::c_void,
nOutAvailableBytes: usize,
pnOutBytes: *mut usize,
) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLZLibInflate(
ptr: *const libc::c_void,
nBytes: usize,
outptr: *mut libc::c_void,
nOutAvailableBytes: usize,
pnOutBytes: *mut usize,
) -> *mut libc::c_void;
}
extern "C" {
pub fn CPLValidateXML(
pszXMLFilename: *const libc::c_char,
pszXSDFilename: *const libc::c_char,
papszOptions: CSLConstList,
) -> libc::c_int;
}
extern "C" {
pub fn CPLsetlocale(category: libc::c_int, locale: *const libc::c_char) -> *mut libc::c_char;
}
extern "C" {
pub fn CPLCleanupSetlocaleMutex();
}
extern "C" {
pub fn CPLIsPowerOfTwo(i: libc::c_uint) -> libc::c_int;
}
extern "C" {
pub fn CSLAddString(
papszStrList: *mut *mut libc::c_char,
pszNewString: *const libc::c_char,
) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn CSLAddStringMayFail(
papszStrList: *mut *mut libc::c_char,
pszNewString: *const libc::c_char,
) -> *mut *mut libc::c_char;
}
// ---------------------------------------------------------------------------
// bindgen-generated FFI declarations for GDAL's CPL string-list ("CSL") API
// (cpl_string.h). A string list (`CSLConstList` / `*mut *mut c_char`) is a
// NULL-terminated array of C strings. Functions that return
// `*mut *mut c_char` typically return a possibly-reallocated list that
// replaces the input; exact ownership rules follow the GDAL C API — confirm
// against the upstream cpl_string.h documentation before freeing.
// ---------------------------------------------------------------------------
extern "C" {
    pub fn CSLCount(papszStrList: CSLConstList) -> libc::c_int;
}
extern "C" {
    pub fn CSLGetField(arg1: CSLConstList, arg2: libc::c_int) -> *const libc::c_char;
}
extern "C" {
    pub fn CSLDestroy(papszStrList: *mut *mut libc::c_char);
}
extern "C" {
    pub fn CSLDuplicate(papszStrList: CSLConstList) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLMerge(
        papszOrig: *mut *mut libc::c_char,
        papszOverride: CSLConstList,
    ) -> *mut *mut libc::c_char;
}
// Tokenization helpers: split a single C string into a string list.
extern "C" {
    pub fn CSLTokenizeString(pszString: *const libc::c_char) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLTokenizeStringComplex(
        pszString: *const libc::c_char,
        pszDelimiter: *const libc::c_char,
        bHonourStrings: libc::c_int,
        bAllowEmptyTokens: libc::c_int,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLTokenizeString2(
        pszString: *const libc::c_char,
        pszDelimiter: *const libc::c_char,
        nCSLTFlags: libc::c_int,
    ) -> *mut *mut libc::c_char;
}
// List I/O: print to a FILE*, load from / save to a file by name.
extern "C" {
    pub fn CSLPrint(papszStrList: CSLConstList, fpOut: *mut FILE) -> libc::c_int;
}
extern "C" {
    pub fn CSLLoad(pszFname: *const libc::c_char) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLLoad2(
        pszFname: *const libc::c_char,
        nMaxLines: libc::c_int,
        nMaxCols: libc::c_int,
        papszOptions: CSLConstList,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLSave(papszStrList: CSLConstList, pszFname: *const libc::c_char) -> libc::c_int;
}
// Insertion, removal and search within a string list.
extern "C" {
    pub fn CSLInsertStrings(
        papszStrList: *mut *mut libc::c_char,
        nInsertAtLineNo: libc::c_int,
        papszNewLines: CSLConstList,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLInsertString(
        papszStrList: *mut *mut libc::c_char,
        nInsertAtLineNo: libc::c_int,
        pszNewLine: *const libc::c_char,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLRemoveStrings(
        papszStrList: *mut *mut libc::c_char,
        nFirstLineToDelete: libc::c_int,
        nNumToRemove: libc::c_int,
        ppapszRetStrings: *mut *mut *mut libc::c_char,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLFindString(papszList: CSLConstList, pszTarget: *const libc::c_char) -> libc::c_int;
}
extern "C" {
    pub fn CSLFindStringCaseSensitive(
        papszList: CSLConstList,
        pszTarget: *const libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CSLPartialFindString(
        papszHaystack: CSLConstList,
        pszNeedle: *const libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CSLFindName(papszStrList: CSLConstList, pszName: *const libc::c_char) -> libc::c_int;
}
// "Name=Value" list helpers: fetch, parse and set key/value entries.
extern "C" {
    pub fn CSLFetchBoolean(
        papszStrList: CSLConstList,
        pszKey: *const libc::c_char,
        bDefault: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CSLTestBoolean(pszValue: *const libc::c_char) -> libc::c_int;
}
extern "C" {
    pub fn CPLTestBoolean(pszValue: *const libc::c_char) -> libc::c_int;
}
extern "C" {
    pub fn CPLParseNameValue(
        pszNameValue: *const libc::c_char,
        ppszKey: *mut *mut libc::c_char,
    ) -> *const libc::c_char;
}
extern "C" {
    pub fn CSLFetchNameValue(
        papszStrList: CSLConstList,
        pszName: *const libc::c_char,
    ) -> *const libc::c_char;
}
extern "C" {
    pub fn CSLFetchNameValueDef(
        papszStrList: CSLConstList,
        pszName: *const libc::c_char,
        pszDefault: *const libc::c_char,
    ) -> *const libc::c_char;
}
extern "C" {
    pub fn CSLFetchNameValueMultiple(
        papszStrList: CSLConstList,
        pszName: *const libc::c_char,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLAddNameValue(
        papszStrList: *mut *mut libc::c_char,
        pszName: *const libc::c_char,
        pszValue: *const libc::c_char,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLSetNameValue(
        papszStrList: *mut *mut libc::c_char,
        pszName: *const libc::c_char,
        pszValue: *const libc::c_char,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CSLSetNameValueSeparator(
        papszStrList: *mut *mut libc::c_char,
        pszSeparator: *const libc::c_char,
    );
}
extern "C" {
    pub fn CSLParseCommandLine(pszCommandLine: *const libc::c_char) -> *mut *mut libc::c_char;
}
// Escaping plus hex / base64 encoding helpers.
extern "C" {
    pub fn CPLEscapeString(
        pszString: *const libc::c_char,
        nLength: libc::c_int,
        nScheme: libc::c_int,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLUnescapeString(
        pszString: *const libc::c_char,
        pnLength: *mut libc::c_int,
        nScheme: libc::c_int,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLBinaryToHex(nBytes: libc::c_int, pabyData: *const GByte) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLHexToBinary(pszHex: *const libc::c_char, pnBytes: *mut libc::c_int) -> *mut GByte;
}
extern "C" {
    pub fn CPLBase64Encode(nBytes: libc::c_int, pabyData: *const GByte) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLBase64DecodeInPlace(pszBase64: *mut GByte) -> libc::c_int;
}
/// Constified enum module (bindgen style): result categories for
/// `CPLGetValueType`. Values must match the C enum exactly.
pub mod CPLValueType {
    pub type Type = libc::c_uint;
    pub const CPL_VALUE_STRING: Type = 0;
    pub const CPL_VALUE_REAL: Type = 1;
    pub const CPL_VALUE_INTEGER: Type = 2;
}
extern "C" {
    pub fn CPLGetValueType(pszValue: *const libc::c_char) -> CPLValueType::Type;
}
// Bounded string copy / concat / length (strlcpy-style wrappers).
extern "C" {
    pub fn CPLStrlcpy(
        pszDest: *mut libc::c_char,
        pszSrc: *const libc::c_char,
        nDestSize: usize,
    ) -> usize;
}
extern "C" {
    pub fn CPLStrlcat(
        pszDest: *mut libc::c_char,
        pszSrc: *const libc::c_char,
        nDestSize: usize,
    ) -> usize;
}
extern "C" {
    pub fn CPLStrnlen(pszStr: *const libc::c_char, nMaxLen: usize) -> usize;
}
// printf/scanf-style formatting wrappers. The `...` variadic declarations
// must be called with C-compatible argument types only.
extern "C" {
    pub fn CPLvsnprintf(
        str_: *mut libc::c_char,
        size: usize,
        fmt: *const libc::c_char,
        args: *mut __va_list_tag,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CPLsnprintf(
        str_: *mut libc::c_char,
        size: usize,
        fmt: *const libc::c_char,
        ...
    ) -> libc::c_int;
}
extern "C" {
    pub fn CPLsprintf(str_: *mut libc::c_char, fmt: *const libc::c_char, ...) -> libc::c_int;
}
extern "C" {
    pub fn CPLprintf(fmt: *const libc::c_char, ...) -> libc::c_int;
}
extern "C" {
    pub fn CPLsscanf(str_: *const libc::c_char, fmt: *const libc::c_char, ...) -> libc::c_int;
}
extern "C" {
    pub fn CPLSPrintf(fmt: *const libc::c_char, ...) -> *const libc::c_char;
}
extern "C" {
    pub fn CSLAppendPrintf(
        papszStrList: *mut *mut libc::c_char,
        fmt: *const libc::c_char,
        ...
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn CPLVASPrintf(
        buf: *mut *mut libc::c_char,
        fmt: *const libc::c_char,
        args: *mut __va_list_tag,
    ) -> libc::c_int;
}
// Character-encoding and recoding helpers (UTF-8 / wide-char conversion).
extern "C" {
    pub fn CPLEncodingCharSize(pszEncoding: *const libc::c_char) -> libc::c_int;
}
extern "C" {
    pub fn CPLClearRecodeWarningFlags();
}
extern "C" {
    pub fn CPLRecode(
        pszSource: *const libc::c_char,
        pszSrcEncoding: *const libc::c_char,
        pszDstEncoding: *const libc::c_char,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLRecodeFromWChar(
        pwszSource: *const wchar_t,
        pszSrcEncoding: *const libc::c_char,
        pszDstEncoding: *const libc::c_char,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLRecodeToWChar(
        pszSource: *const libc::c_char,
        pszSrcEncoding: *const libc::c_char,
        pszDstEncoding: *const libc::c_char,
    ) -> *mut wchar_t;
}
extern "C" {
    pub fn CPLIsUTF8(pabyData: *const libc::c_char, nLen: libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn CPLForceToASCII(
        pabyData: *const libc::c_char,
        nLen: libc::c_int,
        chReplacementChar: libc::c_char,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLStrlenUTF8(pszUTF8Str: *const libc::c_char) -> libc::c_int;
}
extern "C" {
    pub fn CPLCanRecode(
        pszTestStr: *const libc::c_char,
        pszSrcEncoding: *const libc::c_char,
        pszDstEncoding: *const libc::c_char,
    ) -> libc::c_int;
}
/// Opaque handle for GDAL's hash set (cpl_hash_set.h); only ever used
/// behind a pointer, hence the zero-sized placeholder body.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _CPLHashSet {
    _unused: [u8; 0],
}
pub type CPLHashSet = _CPLHashSet;
// Callback typedefs for the hash set. `Option<unsafe extern "C" fn ...>` is
// bindgen's representation of a nullable C function pointer.
pub type CPLHashSetHashFunc =
    ::std::option::Option<unsafe extern "C" fn(elt: *const libc::c_void) -> libc::c_ulong>;
pub type CPLHashSetEqualFunc = ::std::option::Option<
    unsafe extern "C" fn(elt1: *const libc::c_void, elt2: *const libc::c_void) -> libc::c_int,
>;
pub type CPLHashSetFreeEltFunc =
    ::std::option::Option<unsafe extern "C" fn(elt: *mut libc::c_void)>;
pub type CPLHashSetIterEltFunc = ::std::option::Option<
    unsafe extern "C" fn(elt: *mut libc::c_void, user_data: *mut libc::c_void) -> libc::c_int,
>;
// Hash set API: construction, mutation, lookup, and the stock
// pointer/string hash and equality callbacks.
extern "C" {
    pub fn CPLHashSetNew(
        fnHashFunc: CPLHashSetHashFunc,
        fnEqualFunc: CPLHashSetEqualFunc,
        fnFreeEltFunc: CPLHashSetFreeEltFunc,
    ) -> *mut CPLHashSet;
}
extern "C" {
    pub fn CPLHashSetDestroy(set: *mut CPLHashSet);
}
extern "C" {
    pub fn CPLHashSetClear(set: *mut CPLHashSet);
}
extern "C" {
    pub fn CPLHashSetSize(set: *const CPLHashSet) -> libc::c_int;
}
extern "C" {
    pub fn CPLHashSetForeach(
        set: *mut CPLHashSet,
        fnIterFunc: CPLHashSetIterEltFunc,
        user_data: *mut libc::c_void,
    );
}
extern "C" {
    pub fn CPLHashSetInsert(set: *mut CPLHashSet, elt: *mut libc::c_void) -> libc::c_int;
}
extern "C" {
    pub fn CPLHashSetLookup(set: *mut CPLHashSet, elt: *const libc::c_void) -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLHashSetRemove(set: *mut CPLHashSet, elt: *const libc::c_void) -> libc::c_int;
}
extern "C" {
    pub fn CPLHashSetRemoveDeferRehash(
        set: *mut CPLHashSet,
        elt: *const libc::c_void,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CPLHashSetHashPointer(elt: *const libc::c_void) -> libc::c_ulong;
}
extern "C" {
    pub fn CPLHashSetEqualPointer(
        elt1: *const libc::c_void,
        elt2: *const libc::c_void,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CPLHashSetHashStr(pszStr: *const libc::c_void) -> libc::c_ulong;
}
extern "C" {
    pub fn CPLHashSetEqualStr(
        pszStr1: *const libc::c_void,
        pszStr2: *const libc::c_void,
    ) -> libc::c_int;
}
pub type CPLList = _CPLList;
/// Node of GDAL's singly linked CPL list (used by `CPLListAppend`,
/// `CPLListGetNext`, etc.): an untyped data pointer plus a next pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _CPLList {
    pub pData: *mut libc::c_void,
    pub psNext: *mut _CPLList,
}
#[test]
fn bindgen_test_layout__CPLList() {
    // Field offsets are computed from an uninitialized value via `addr_of!`
    // instead of the old `&(*::std::ptr::null::<T>()).field` idiom: that
    // pattern dereferences a null pointer, which is undefined behavior and
    // is flagged by Miri. Modern bindgen emits exactly this formulation.
    // Expected sizes/offsets assume a 64-bit target (8-byte pointers).
    const UNINIT: ::std::mem::MaybeUninit<_CPLList> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<_CPLList>(),
        16usize,
        concat!("Size of: ", stringify!(_CPLList))
    );
    assert_eq!(
        ::std::mem::align_of::<_CPLList>(),
        8usize,
        concat!("Alignment of ", stringify!(_CPLList))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pData) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(_CPLList),
            "::",
            stringify!(pData)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).psNext) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(_CPLList),
            "::",
            stringify!(psNext)
        )
    );
}
// CPL linked-list API (cpl_list.h): append/insert/remove by position,
// positional lookup, and traversal accessors.
extern "C" {
    pub fn CPLListAppend(psList: *mut CPLList, pData: *mut libc::c_void) -> *mut CPLList;
}
extern "C" {
    pub fn CPLListInsert(
        psList: *mut CPLList,
        pData: *mut libc::c_void,
        nPosition: libc::c_int,
    ) -> *mut CPLList;
}
extern "C" {
    pub fn CPLListGetLast(psList: *mut CPLList) -> *mut CPLList;
}
extern "C" {
    pub fn CPLListGet(psList: *mut CPLList, nPosition: libc::c_int) -> *mut CPLList;
}
extern "C" {
    pub fn CPLListCount(psList: *const CPLList) -> libc::c_int;
}
extern "C" {
    pub fn CPLListRemove(psList: *mut CPLList, nPosition: libc::c_int) -> *mut CPLList;
}
extern "C" {
    pub fn CPLListDestroy(psList: *mut CPLList);
}
extern "C" {
    pub fn CPLListGetNext(psElement: *const CPLList) -> *mut CPLList;
}
extern "C" {
    pub fn CPLListGetData(psElement: *const CPLList) -> *mut libc::c_void;
}
/// Constified enum module: node kinds of GDAL's minimal XML tree
/// (cpl_minixml.h). Values must match the C enum exactly.
pub mod CPLXMLNodeType {
    pub type Type = libc::c_uint;
    pub const CXT_Element: Type = 0;
    pub const CXT_Text: Type = 1;
    pub const CXT_Attribute: Type = 2;
    pub const CXT_Comment: Type = 3;
    pub const CXT_Literal: Type = 4;
}
/// Node of GDAL's minimal XML document tree (cpl_minixml.h): a node kind,
/// its value string, and sibling/child links.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct CPLXMLNode {
    pub eType: CPLXMLNodeType::Type,
    pub pszValue: *mut libc::c_char,
    pub psNext: *mut CPLXMLNode,
    pub psChild: *mut CPLXMLNode,
}
#[test]
fn bindgen_test_layout_CPLXMLNode() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri); this matches what modern bindgen generates. Expected values
    // assume a 64-bit target.
    const UNINIT: ::std::mem::MaybeUninit<CPLXMLNode> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<CPLXMLNode>(),
        32usize,
        concat!("Size of: ", stringify!(CPLXMLNode))
    );
    assert_eq!(
        ::std::mem::align_of::<CPLXMLNode>(),
        8usize,
        concat!("Alignment of ", stringify!(CPLXMLNode))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).eType) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLXMLNode),
            "::",
            stringify!(eType)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pszValue) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLXMLNode),
            "::",
            stringify!(pszValue)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).psNext) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLXMLNode),
            "::",
            stringify!(psNext)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).psChild) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLXMLNode),
            "::",
            stringify!(psChild)
        )
    );
}
// Minimal XML tree API (cpl_minixml.h): parse/serialize, navigate, and
// mutate CPLXMLNode trees. Trees returned by the parse functions are
// destroyed with CPLDestroyXMLNode.
extern "C" {
    pub fn CPLParseXMLString(arg1: *const libc::c_char) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn CPLDestroyXMLNode(arg1: *mut CPLXMLNode);
}
extern "C" {
    pub fn CPLGetXMLNode(poRoot: *mut CPLXMLNode, pszPath: *const libc::c_char) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn CPLSearchXMLNode(
        poRoot: *mut CPLXMLNode,
        pszTarget: *const libc::c_char,
    ) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn CPLGetXMLValue(
        poRoot: *const CPLXMLNode,
        pszPath: *const libc::c_char,
        pszDefault: *const libc::c_char,
    ) -> *const libc::c_char;
}
extern "C" {
    pub fn CPLCreateXMLNode(
        poParent: *mut CPLXMLNode,
        eType: CPLXMLNodeType::Type,
        pszText: *const libc::c_char,
    ) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn CPLSerializeXMLTree(psNode: *const CPLXMLNode) -> *mut libc::c_char;
}
extern "C" {
    pub fn CPLAddXMLChild(psParent: *mut CPLXMLNode, psChild: *mut CPLXMLNode);
}
extern "C" {
    pub fn CPLRemoveXMLChild(psParent: *mut CPLXMLNode, psChild: *mut CPLXMLNode) -> libc::c_int;
}
extern "C" {
    pub fn CPLAddXMLSibling(psOlderSibling: *mut CPLXMLNode, psNewSibling: *mut CPLXMLNode);
}
extern "C" {
    pub fn CPLCreateXMLElementAndValue(
        psParent: *mut CPLXMLNode,
        pszName: *const libc::c_char,
        pszValue: *const libc::c_char,
    ) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn CPLAddXMLAttributeAndValue(
        psParent: *mut CPLXMLNode,
        pszName: *const libc::c_char,
        pszValue: *const libc::c_char,
    );
}
extern "C" {
    pub fn CPLCloneXMLTree(psTree: *const CPLXMLNode) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn CPLSetXMLValue(
        psRoot: *mut CPLXMLNode,
        pszPath: *const libc::c_char,
        pszValue: *const libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CPLStripXMLNamespace(
        psRoot: *mut CPLXMLNode,
        pszNameSpace: *const libc::c_char,
        bRecurse: libc::c_int,
    );
}
extern "C" {
    pub fn CPLCleanXMLElementName(arg1: *mut libc::c_char);
}
extern "C" {
    pub fn CPLParseXMLFile(pszFilename: *const libc::c_char) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn CPLSerializeXMLTreeToFile(
        psTree: *const CPLXMLNode,
        pszFilename: *const libc::c_char,
    ) -> libc::c_int;
}
/// Axis-aligned rectangle used by the CPL quadtree API (cpl_quad_tree.h):
/// min/max corners on each axis, as doubles.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct CPLRectObj {
    pub minx: f64,
    pub miny: f64,
    pub maxx: f64,
    pub maxy: f64,
}
#[test]
fn bindgen_test_layout_CPLRectObj() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri); this matches what modern bindgen generates.
    const UNINIT: ::std::mem::MaybeUninit<CPLRectObj> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<CPLRectObj>(),
        32usize,
        concat!("Size of: ", stringify!(CPLRectObj))
    );
    assert_eq!(
        ::std::mem::align_of::<CPLRectObj>(),
        8usize,
        concat!("Alignment of ", stringify!(CPLRectObj))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).minx) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLRectObj),
            "::",
            stringify!(minx)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).miny) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLRectObj),
            "::",
            stringify!(miny)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).maxx) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLRectObj),
            "::",
            stringify!(maxx)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).maxy) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(CPLRectObj),
            "::",
            stringify!(maxy)
        )
    );
}
/// Opaque handle for GDAL's quadtree spatial index (cpl_quad_tree.h);
/// only ever used behind a pointer, hence the zero-sized placeholder body.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _CPLQuadTree {
    _unused: [u8; 0],
}
pub type CPLQuadTree = _CPLQuadTree;
// Nullable C callback typedefs for the quadtree API: bounds extraction,
// per-element iteration, and debug dumping.
pub type CPLQuadTreeGetBoundsFunc = ::std::option::Option<
    unsafe extern "C" fn(hFeature: *const libc::c_void, pBounds: *mut CPLRectObj),
>;
pub type CPLQuadTreeForeachFunc = ::std::option::Option<
    unsafe extern "C" fn(pElt: *mut libc::c_void, pUserData: *mut libc::c_void) -> libc::c_int,
>;
pub type CPLQuadTreeDumpFeatureFunc = ::std::option::Option<
    unsafe extern "C" fn(
        hFeature: *const libc::c_void,
        nIndentLevel: libc::c_int,
        pUserData: *mut libc::c_void,
    ),
>;
// Quadtree API: create/destroy, tuning, insertion/removal, rectangular
// search, iteration and statistics.
extern "C" {
    pub fn CPLQuadTreeCreate(
        pGlobalBounds: *const CPLRectObj,
        pfnGetBounds: CPLQuadTreeGetBoundsFunc,
    ) -> *mut CPLQuadTree;
}
extern "C" {
    pub fn CPLQuadTreeDestroy(hQuadtree: *mut CPLQuadTree);
}
extern "C" {
    pub fn CPLQuadTreeSetBucketCapacity(hQuadtree: *mut CPLQuadTree, nBucketCapacity: libc::c_int);
}
extern "C" {
    pub fn CPLQuadTreeGetAdvisedMaxDepth(nExpectedFeatures: libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn CPLQuadTreeSetMaxDepth(hQuadtree: *mut CPLQuadTree, nMaxDepth: libc::c_int);
}
extern "C" {
    pub fn CPLQuadTreeInsert(hQuadtree: *mut CPLQuadTree, hFeature: *mut libc::c_void);
}
extern "C" {
    pub fn CPLQuadTreeInsertWithBounds(
        hQuadtree: *mut CPLQuadTree,
        hFeature: *mut libc::c_void,
        psBounds: *const CPLRectObj,
    );
}
extern "C" {
    pub fn CPLQuadTreeRemove(
        hQuadtree: *mut CPLQuadTree,
        hFeature: *mut libc::c_void,
        psBounds: *const CPLRectObj,
    );
}
extern "C" {
    pub fn CPLQuadTreeSearch(
        hQuadtree: *const CPLQuadTree,
        pAoi: *const CPLRectObj,
        pnFeatureCount: *mut libc::c_int,
    ) -> *mut *mut libc::c_void;
}
extern "C" {
    pub fn CPLQuadTreeForeach(
        hQuadtree: *const CPLQuadTree,
        pfnForeach: CPLQuadTreeForeachFunc,
        pUserData: *mut libc::c_void,
    );
}
extern "C" {
    pub fn CPLQuadTreeDump(
        hQuadtree: *const CPLQuadTree,
        pfnDumpFeatureFunc: CPLQuadTreeDumpFeatureFunc,
        pUserData: *mut libc::c_void,
    );
}
extern "C" {
    pub fn CPLQuadTreeGetStats(
        hQuadtree: *const CPLQuadTree,
        pnFeatureCount: *mut libc::c_int,
        pnNodeCount: *mut libc::c_int,
        pnMaxDepth: *mut libc::c_int,
        pnMaxBucketCapacity: *mut libc::c_int,
    );
}
/// Opaque handle for GDAL's virtual-memory abstraction (cpl_virtualmem.h).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct CPLVirtualMem {
    _unused: [u8; 0],
}
// Page fill / eviction / user-data-release callbacks for CPLVirtualMemNew.
pub type CPLVirtualMemCachePageCbk = ::std::option::Option<
    unsafe extern "C" fn(
        ctxt: *mut CPLVirtualMem,
        nOffset: usize,
        pPageToFill: *mut libc::c_void,
        nToFill: usize,
        pUserData: *mut libc::c_void,
    ),
>;
pub type CPLVirtualMemUnCachePageCbk = ::std::option::Option<
    unsafe extern "C" fn(
        ctxt: *mut CPLVirtualMem,
        nOffset: usize,
        pPageToBeEvicted: *const libc::c_void,
        nToBeEvicted: usize,
        pUserData: *mut libc::c_void,
    ),
>;
pub type CPLVirtualMemFreeUserData =
    ::std::option::Option<unsafe extern "C" fn(pUserData: *mut libc::c_void)>;
/// Constified enum module: access modes for a virtual-memory mapping.
/// Values must match the C enum exactly.
pub mod CPLVirtualMemAccessMode {
    pub type Type = libc::c_uint;
    pub const VIRTUALMEM_READONLY: Type = 0;
    pub const VIRTUALMEM_READONLY_ENFORCED: Type = 1;
    pub const VIRTUALMEM_READWRITE: Type = 2;
}
// Virtual-memory API: constructors (callback-backed, file-mapped, derived
// view), accessors, per-thread declarations, and pinning.
extern "C" {
    pub fn CPLGetPageSize() -> usize;
}
extern "C" {
    pub fn CPLVirtualMemNew(
        nSize: usize,
        nCacheSize: usize,
        nPageSizeHint: usize,
        bSingleThreadUsage: libc::c_int,
        eAccessMode: CPLVirtualMemAccessMode::Type,
        pfnCachePage: CPLVirtualMemCachePageCbk,
        pfnUnCachePage: CPLVirtualMemUnCachePageCbk,
        pfnFreeUserData: CPLVirtualMemFreeUserData,
        pCbkUserData: *mut libc::c_void,
    ) -> *mut CPLVirtualMem;
}
extern "C" {
    pub fn CPLIsVirtualMemFileMapAvailable() -> libc::c_int;
}
extern "C" {
    pub fn CPLVirtualMemFileMapNew(
        fp: *mut VSILFILE,
        nOffset: vsi_l_offset,
        nLength: vsi_l_offset,
        eAccessMode: CPLVirtualMemAccessMode::Type,
        pfnFreeUserData: CPLVirtualMemFreeUserData,
        pCbkUserData: *mut libc::c_void,
    ) -> *mut CPLVirtualMem;
}
extern "C" {
    pub fn CPLVirtualMemDerivedNew(
        pVMemBase: *mut CPLVirtualMem,
        nOffset: vsi_l_offset,
        nSize: vsi_l_offset,
        pfnFreeUserData: CPLVirtualMemFreeUserData,
        pCbkUserData: *mut libc::c_void,
    ) -> *mut CPLVirtualMem;
}
extern "C" {
    pub fn CPLVirtualMemFree(ctxt: *mut CPLVirtualMem);
}
extern "C" {
    pub fn CPLVirtualMemGetAddr(ctxt: *mut CPLVirtualMem) -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLVirtualMemGetSize(ctxt: *mut CPLVirtualMem) -> usize;
}
extern "C" {
    pub fn CPLVirtualMemIsFileMapping(ctxt: *mut CPLVirtualMem) -> libc::c_int;
}
extern "C" {
    pub fn CPLVirtualMemGetAccessMode(ctxt: *mut CPLVirtualMem) -> CPLVirtualMemAccessMode::Type;
}
extern "C" {
    pub fn CPLVirtualMemGetPageSize(ctxt: *mut CPLVirtualMem) -> usize;
}
extern "C" {
    pub fn CPLVirtualMemIsAccessThreadSafe(ctxt: *mut CPLVirtualMem) -> libc::c_int;
}
extern "C" {
    pub fn CPLVirtualMemDeclareThread(ctxt: *mut CPLVirtualMem);
}
extern "C" {
    pub fn CPLVirtualMemUnDeclareThread(ctxt: *mut CPLVirtualMem);
}
extern "C" {
    pub fn CPLVirtualMemPin(
        ctxt: *mut CPLVirtualMem,
        pAddr: *mut libc::c_void,
        nSize: usize,
        bWriteOp: libc::c_int,
    );
}
extern "C" {
    pub fn CPLVirtualMemManagerTerminate();
}
/// 2-D bounding box used throughout OGR (ogr_core.h): min/max X and Y
/// extents, as doubles.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGREnvelope {
    pub MinX: f64,
    pub MaxX: f64,
    pub MinY: f64,
    pub MaxY: f64,
}
#[test]
fn bindgen_test_layout_OGREnvelope() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri); this matches what modern bindgen generates.
    const UNINIT: ::std::mem::MaybeUninit<OGREnvelope> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGREnvelope>(),
        32usize,
        concat!("Size of: ", stringify!(OGREnvelope))
    );
    assert_eq!(
        ::std::mem::align_of::<OGREnvelope>(),
        8usize,
        concat!("Alignment of ", stringify!(OGREnvelope))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MinX) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope),
            "::",
            stringify!(MinX)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MaxX) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope),
            "::",
            stringify!(MaxX)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MinY) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope),
            "::",
            stringify!(MinY)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MaxY) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope),
            "::",
            stringify!(MaxY)
        )
    );
}
/// 3-D bounding box used throughout OGR (ogr_core.h): min/max X, Y and Z
/// extents, as doubles.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGREnvelope3D {
    pub MinX: f64,
    pub MaxX: f64,
    pub MinY: f64,
    pub MaxY: f64,
    pub MinZ: f64,
    pub MaxZ: f64,
}
#[test]
fn bindgen_test_layout_OGREnvelope3D() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri); this matches what modern bindgen generates.
    const UNINIT: ::std::mem::MaybeUninit<OGREnvelope3D> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGREnvelope3D>(),
        48usize,
        concat!("Size of: ", stringify!(OGREnvelope3D))
    );
    assert_eq!(
        ::std::mem::align_of::<OGREnvelope3D>(),
        8usize,
        concat!("Alignment of ", stringify!(OGREnvelope3D))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MinX) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope3D),
            "::",
            stringify!(MinX)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MaxX) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope3D),
            "::",
            stringify!(MaxX)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MinY) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope3D),
            "::",
            stringify!(MinY)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MaxY) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope3D),
            "::",
            stringify!(MaxY)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MinZ) as usize - ptr as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope3D),
            "::",
            stringify!(MinZ)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).MaxZ) as usize - ptr as usize },
        40usize,
        concat!(
            "Offset of field: ",
            stringify!(OGREnvelope3D),
            "::",
            stringify!(MaxZ)
        )
    );
}
// OGR memory helpers (thin wrappers over the C allocator family).
extern "C" {
    pub fn OGRMalloc(arg1: usize) -> *mut libc::c_void;
}
extern "C" {
    pub fn OGRCalloc(arg1: usize, arg2: usize) -> *mut libc::c_void;
}
extern "C" {
    pub fn OGRRealloc(arg1: *mut libc::c_void, arg2: usize) -> *mut libc::c_void;
}
extern "C" {
    pub fn OGRStrdup(arg1: *const libc::c_char) -> *mut libc::c_char;
}
extern "C" {
    pub fn OGRFree(arg1: *mut libc::c_void);
}
/// Constified enum module: well-known-binary geometry type codes
/// (ogr_core.h). The base codes follow the ISO SQL/MM WKB numbering;
/// Z variants add 1000, M variants 2000, ZM variants 3000, and the legacy
/// 25D variants set the high bit (hence values >= 2147483649). Values must
/// match the C enum exactly.
pub mod OGRwkbGeometryType {
    pub type Type = libc::c_uint;
    pub const wkbUnknown: Type = 0;
    pub const wkbPoint: Type = 1;
    pub const wkbLineString: Type = 2;
    pub const wkbPolygon: Type = 3;
    pub const wkbMultiPoint: Type = 4;
    pub const wkbMultiLineString: Type = 5;
    pub const wkbMultiPolygon: Type = 6;
    pub const wkbGeometryCollection: Type = 7;
    pub const wkbCircularString: Type = 8;
    pub const wkbCompoundCurve: Type = 9;
    pub const wkbCurvePolygon: Type = 10;
    pub const wkbMultiCurve: Type = 11;
    pub const wkbMultiSurface: Type = 12;
    pub const wkbCurve: Type = 13;
    pub const wkbSurface: Type = 14;
    pub const wkbPolyhedralSurface: Type = 15;
    pub const wkbTIN: Type = 16;
    pub const wkbTriangle: Type = 17;
    pub const wkbNone: Type = 100;
    pub const wkbLinearRing: Type = 101;
    pub const wkbCircularStringZ: Type = 1008;
    pub const wkbCompoundCurveZ: Type = 1009;
    pub const wkbCurvePolygonZ: Type = 1010;
    pub const wkbMultiCurveZ: Type = 1011;
    pub const wkbMultiSurfaceZ: Type = 1012;
    pub const wkbCurveZ: Type = 1013;
    pub const wkbSurfaceZ: Type = 1014;
    pub const wkbPolyhedralSurfaceZ: Type = 1015;
    pub const wkbTINZ: Type = 1016;
    pub const wkbTriangleZ: Type = 1017;
    pub const wkbPointM: Type = 2001;
    pub const wkbLineStringM: Type = 2002;
    pub const wkbPolygonM: Type = 2003;
    pub const wkbMultiPointM: Type = 2004;
    pub const wkbMultiLineStringM: Type = 2005;
    pub const wkbMultiPolygonM: Type = 2006;
    pub const wkbGeometryCollectionM: Type = 2007;
    pub const wkbCircularStringM: Type = 2008;
    pub const wkbCompoundCurveM: Type = 2009;
    pub const wkbCurvePolygonM: Type = 2010;
    pub const wkbMultiCurveM: Type = 2011;
    pub const wkbMultiSurfaceM: Type = 2012;
    pub const wkbCurveM: Type = 2013;
    pub const wkbSurfaceM: Type = 2014;
    pub const wkbPolyhedralSurfaceM: Type = 2015;
    pub const wkbTINM: Type = 2016;
    pub const wkbTriangleM: Type = 2017;
    pub const wkbPointZM: Type = 3001;
    pub const wkbLineStringZM: Type = 3002;
    pub const wkbPolygonZM: Type = 3003;
    pub const wkbMultiPointZM: Type = 3004;
    pub const wkbMultiLineStringZM: Type = 3005;
    pub const wkbMultiPolygonZM: Type = 3006;
    pub const wkbGeometryCollectionZM: Type = 3007;
    pub const wkbCircularStringZM: Type = 3008;
    pub const wkbCompoundCurveZM: Type = 3009;
    pub const wkbCurvePolygonZM: Type = 3010;
    pub const wkbMultiCurveZM: Type = 3011;
    pub const wkbMultiSurfaceZM: Type = 3012;
    pub const wkbCurveZM: Type = 3013;
    pub const wkbSurfaceZM: Type = 3014;
    pub const wkbPolyhedralSurfaceZM: Type = 3015;
    pub const wkbTINZM: Type = 3016;
    pub const wkbTriangleZM: Type = 3017;
    pub const wkbPoint25D: Type = 2147483649;
    pub const wkbLineString25D: Type = 2147483650;
    pub const wkbPolygon25D: Type = 2147483651;
    pub const wkbMultiPoint25D: Type = 2147483652;
    pub const wkbMultiLineString25D: Type = 2147483653;
    pub const wkbMultiPolygon25D: Type = 2147483654;
    pub const wkbGeometryCollection25D: Type = 2147483655;
}
// Geometry-type utilities ("OGR_GT_*"): flattening, adding/removing Z and M
// modifiers, subtype tests, and promotion to collection/curve/linear forms.
extern "C" {
    pub fn OGRGeometryTypeToName(eType: OGRwkbGeometryType::Type) -> *const libc::c_char;
}
extern "C" {
    pub fn OGRMergeGeometryTypes(
        eMain: OGRwkbGeometryType::Type,
        eExtra: OGRwkbGeometryType::Type,
    ) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGRMergeGeometryTypesEx(
        eMain: OGRwkbGeometryType::Type,
        eExtra: OGRwkbGeometryType::Type,
        bAllowPromotingToCurves: libc::c_int,
    ) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GT_Flatten(eType: OGRwkbGeometryType::Type) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GT_SetZ(eType: OGRwkbGeometryType::Type) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GT_SetM(eType: OGRwkbGeometryType::Type) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GT_SetModifier(
        eType: OGRwkbGeometryType::Type,
        bSetZ: libc::c_int,
        bSetM: libc::c_int,
    ) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GT_HasZ(eType: OGRwkbGeometryType::Type) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GT_HasM(eType: OGRwkbGeometryType::Type) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GT_IsSubClassOf(
        eType: OGRwkbGeometryType::Type,
        eSuperType: OGRwkbGeometryType::Type,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GT_IsCurve(arg1: OGRwkbGeometryType::Type) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GT_IsSurface(arg1: OGRwkbGeometryType::Type) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GT_IsNonLinear(arg1: OGRwkbGeometryType::Type) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GT_GetCollection(eType: OGRwkbGeometryType::Type) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GT_GetCurve(eType: OGRwkbGeometryType::Type) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GT_GetLinear(eType: OGRwkbGeometryType::Type) -> OGRwkbGeometryType::Type;
}
/// WKB byte-order codes: XDR = big endian, NDR = little endian.
pub mod OGRwkbByteOrder {
    pub type Type = libc::c_uint;
    pub const wkbXDR: Type = 0;
    pub const wkbNDR: Type = 1;
}
/// Attribute field type codes for OGR feature fields. Values must match the
/// C enum exactly.
pub mod OGRFieldType {
    pub type Type = libc::c_uint;
    pub const OFTInteger: Type = 0;
    pub const OFTIntegerList: Type = 1;
    pub const OFTReal: Type = 2;
    pub const OFTRealList: Type = 3;
    pub const OFTString: Type = 4;
    pub const OFTStringList: Type = 5;
    pub const OFTWideString: Type = 6;
    pub const OFTWideStringList: Type = 7;
    pub const OFTBinary: Type = 8;
    pub const OFTDate: Type = 9;
    pub const OFTTime: Type = 10;
    pub const OFTDateTime: Type = 11;
    pub const OFTInteger64: Type = 12;
    pub const OFTInteger64List: Type = 13;
    pub const OFTMaxType: Type = 13;
}
/// Field subtype refinements of OGRFieldType (e.g. Boolean stored as
/// Integer). Values must match the C enum exactly.
pub mod OGRFieldSubType {
    pub type Type = libc::c_uint;
    pub const OFSTNone: Type = 0;
    pub const OFSTBoolean: Type = 1;
    pub const OFSTInt16: Type = 2;
    pub const OFSTFloat32: Type = 3;
    pub const OFSTJSON: Type = 4;
    pub const OFSTUUID: Type = 5;
    pub const OFSTMaxSubType: Type = 5;
}
/// Text justification hints for field display.
pub mod OGRJustification {
    pub type Type = libc::c_uint;
    pub const OJUndefined: Type = 0;
    pub const OJLeft: Type = 1;
    pub const OJRight: Type = 2;
}
/// C union holding one OGR feature field value; which member is valid is
/// determined externally by the field's OGRFieldType. The `__bindgen_ty_*`
/// structs below are bindgen's names for the anonymous list/binary/marker/
/// date members of this union in the C header.
#[repr(C)]
#[derive(Copy, Clone)]
pub union OGRField {
    pub Integer: libc::c_int,
    pub Integer64: GIntBig,
    pub Real: f64,
    pub String: *mut libc::c_char,
    pub IntegerList: OGRField__bindgen_ty_1,
    pub Integer64List: OGRField__bindgen_ty_2,
    pub RealList: OGRField__bindgen_ty_3,
    pub StringList: OGRField__bindgen_ty_4,
    pub Binary: OGRField__bindgen_ty_5,
    pub Set: OGRField__bindgen_ty_6,
    pub Date: OGRField__bindgen_ty_7,
}
/// Anonymous member of `OGRField`: counted list of C ints (IntegerList).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRField__bindgen_ty_1 {
    pub nCount: libc::c_int,
    pub paList: *mut libc::c_int,
}
#[test]
fn bindgen_test_layout_OGRField__bindgen_ty_1() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri). Expected values assume a 64-bit target.
    const UNINIT: ::std::mem::MaybeUninit<OGRField__bindgen_ty_1> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField__bindgen_ty_1>(),
        16usize,
        concat!("Size of: ", stringify!(OGRField__bindgen_ty_1))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField__bindgen_ty_1>(),
        8usize,
        concat!("Alignment of ", stringify!(OGRField__bindgen_ty_1))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nCount) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_1),
            "::",
            stringify!(nCount)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).paList) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_1),
            "::",
            stringify!(paList)
        )
    );
}
/// Anonymous member of `OGRField`: counted list of 64-bit ints (Integer64List).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRField__bindgen_ty_2 {
    pub nCount: libc::c_int,
    pub paList: *mut GIntBig,
}
#[test]
fn bindgen_test_layout_OGRField__bindgen_ty_2() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri). Expected values assume a 64-bit target.
    const UNINIT: ::std::mem::MaybeUninit<OGRField__bindgen_ty_2> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField__bindgen_ty_2>(),
        16usize,
        concat!("Size of: ", stringify!(OGRField__bindgen_ty_2))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField__bindgen_ty_2>(),
        8usize,
        concat!("Alignment of ", stringify!(OGRField__bindgen_ty_2))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nCount) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_2),
            "::",
            stringify!(nCount)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).paList) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_2),
            "::",
            stringify!(paList)
        )
    );
}
/// Anonymous member of `OGRField`: counted list of doubles (RealList).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRField__bindgen_ty_3 {
    pub nCount: libc::c_int,
    pub paList: *mut f64,
}
#[test]
fn bindgen_test_layout_OGRField__bindgen_ty_3() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri). Expected values assume a 64-bit target.
    const UNINIT: ::std::mem::MaybeUninit<OGRField__bindgen_ty_3> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField__bindgen_ty_3>(),
        16usize,
        concat!("Size of: ", stringify!(OGRField__bindgen_ty_3))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField__bindgen_ty_3>(),
        8usize,
        concat!("Alignment of ", stringify!(OGRField__bindgen_ty_3))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nCount) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_3),
            "::",
            stringify!(nCount)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).paList) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_3),
            "::",
            stringify!(paList)
        )
    );
}
/// Anonymous member of `OGRField`: counted list of C strings (StringList).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRField__bindgen_ty_4 {
    pub nCount: libc::c_int,
    pub paList: *mut *mut libc::c_char,
}
#[test]
fn bindgen_test_layout_OGRField__bindgen_ty_4() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri). Expected values assume a 64-bit target.
    const UNINIT: ::std::mem::MaybeUninit<OGRField__bindgen_ty_4> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField__bindgen_ty_4>(),
        16usize,
        concat!("Size of: ", stringify!(OGRField__bindgen_ty_4))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField__bindgen_ty_4>(),
        8usize,
        concat!("Alignment of ", stringify!(OGRField__bindgen_ty_4))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nCount) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_4),
            "::",
            stringify!(nCount)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).paList) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_4),
            "::",
            stringify!(paList)
        )
    );
}
/// Anonymous member of `OGRField`: counted byte buffer (Binary).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRField__bindgen_ty_5 {
    pub nCount: libc::c_int,
    pub paData: *mut GByte,
}
#[test]
fn bindgen_test_layout_OGRField__bindgen_ty_5() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri). Expected values assume a 64-bit target.
    const UNINIT: ::std::mem::MaybeUninit<OGRField__bindgen_ty_5> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField__bindgen_ty_5>(),
        16usize,
        concat!("Size of: ", stringify!(OGRField__bindgen_ty_5))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField__bindgen_ty_5>(),
        8usize,
        concat!("Alignment of ", stringify!(OGRField__bindgen_ty_5))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nCount) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_5),
            "::",
            stringify!(nCount)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).paData) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_5),
            "::",
            stringify!(paData)
        )
    );
}
/// Anonymous member of `OGRField`: three marker ints (the "Set" member, used
/// by OGR to tag unset/null field state in the C API).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRField__bindgen_ty_6 {
    pub nMarker1: libc::c_int,
    pub nMarker2: libc::c_int,
    pub nMarker3: libc::c_int,
}
#[test]
fn bindgen_test_layout_OGRField__bindgen_ty_6() {
    // Offsets computed via MaybeUninit + addr_of! rather than the old
    // null-pointer-deref idiom, which is undefined behavior (flagged by
    // Miri).
    const UNINIT: ::std::mem::MaybeUninit<OGRField__bindgen_ty_6> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField__bindgen_ty_6>(),
        12usize,
        concat!("Size of: ", stringify!(OGRField__bindgen_ty_6))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField__bindgen_ty_6>(),
        4usize,
        concat!("Alignment of ", stringify!(OGRField__bindgen_ty_6))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nMarker1) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_6),
            "::",
            stringify!(nMarker1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nMarker2) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_6),
            "::",
            stringify!(nMarker2)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nMarker3) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_6),
            "::",
            stringify!(nMarker3)
        )
    );
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
/// bindgen-generated variant of the `OGRField` union: packed date/time
/// value (matches the GDAL C API's `Date` member). Field meanings follow
/// the OGR convention — e.g. `TZFlag` encodes the timezone — TODO confirm
/// against ogr_core.h for the exact encoding.
pub struct OGRField__bindgen_ty_7 {
    pub Year: GInt16,
    pub Month: GByte,
    pub Day: GByte,
    pub Hour: GByte,
    pub Minute: GByte,
    pub TZFlag: GByte,
    /// Padding/reserved byte; keeps `Second` 4-byte aligned.
    pub Reserved: GByte,
    /// Seconds with fractional part (hence `f32`, not an integer).
    pub Second: f32,
}
#[test]
/// Verifies size, alignment and field offsets of `OGRField__bindgen_ty_7`
/// match the C layout bindgen recorded at generation time.
fn bindgen_test_layout_OGRField__bindgen_ty_7() {
    // Offset probe via MaybeUninit instead of `&(*null()).field`: taking a
    // field address through a null pointer is undefined behavior, and newer
    // bindgen versions emit exactly this MaybeUninit/addr_of! pattern.
    const UNINIT: ::std::mem::MaybeUninit<OGRField__bindgen_ty_7> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField__bindgen_ty_7>(),
        12usize,
        concat!("Size of: ", stringify!(OGRField__bindgen_ty_7))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField__bindgen_ty_7>(),
        4usize,
        concat!("Alignment of ", stringify!(OGRField__bindgen_ty_7))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Year) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(Year)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Month) as usize - ptr as usize },
        2usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(Month)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Day) as usize - ptr as usize },
        3usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(Day)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Hour) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(Hour)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Minute) as usize - ptr as usize },
        5usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(Minute)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).TZFlag) as usize - ptr as usize },
        6usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(TZFlag)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Reserved) as usize - ptr as usize },
        7usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(Reserved)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Second) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField__bindgen_ty_7),
            "::",
            stringify!(Second)
        )
    );
}
#[test]
/// Verifies size, alignment and member offsets of the `OGRField` union.
/// Every member sits at offset 0, as expected for a C union.
fn bindgen_test_layout_OGRField() {
    // Offset probe via MaybeUninit instead of `&(*null()).field`: taking a
    // field address through a null pointer is undefined behavior, and newer
    // bindgen versions emit exactly this MaybeUninit/addr_of! pattern.
    const UNINIT: ::std::mem::MaybeUninit<OGRField> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRField>(),
        16usize,
        concat!("Size of: ", stringify!(OGRField))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRField>(),
        8usize,
        concat!("Alignment of ", stringify!(OGRField))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Integer) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(Integer)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Integer64) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(Integer64)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Real) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(Real)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).String) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(String)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).IntegerList) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(IntegerList)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Integer64List) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(Integer64List)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).RealList) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(RealList)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).StringList) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(StringList)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Binary) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(Binary)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Set) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(Set)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).Date) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRField),
            "::",
            stringify!(Date)
        )
    );
}
extern "C" {
    /// Parses a date/time string into an `OGRField`; returns a C int
    /// (per the GDAL C API, nonzero on success — TODO confirm).
    pub fn OGRParseDate(
        pszInput: *const libc::c_char,
        psOutput: *mut OGRField,
        nOptions: libc::c_int,
    ) -> libc::c_int;
}
/// OGR style tool class identifiers (C enum `OGRSTClassId`),
/// modularized by bindgen's "module enums" option.
pub mod ogr_style_tool_class_id {
    pub type Type = libc::c_uint;
    pub const OGRSTCNone: Type = 0;
    pub const OGRSTCPen: Type = 1;
    pub const OGRSTCBrush: Type = 2;
    pub const OGRSTCSymbol: Type = 3;
    pub const OGRSTCLabel: Type = 4;
    pub const OGRSTCVector: Type = 5;
}
pub use self::ogr_style_tool_class_id::Type as OGRSTClassId;
/// OGR style tool unit identifiers (C enum `OGRSTUnitId`).
pub mod ogr_style_tool_units_id {
    pub type Type = libc::c_uint;
    pub const OGRSTUGround: Type = 0;
    pub const OGRSTUPixel: Type = 1;
    pub const OGRSTUPoints: Type = 2;
    pub const OGRSTUMM: Type = 3;
    pub const OGRSTUCM: Type = 4;
    pub const OGRSTUInches: Type = 5;
}
pub use self::ogr_style_tool_units_id::Type as OGRSTUnitId;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
/// A single (code, value) pair of a coded field domain
/// (mirrors the C struct `OGRCodedValue` from ogr_core.h).
pub struct OGRCodedValue {
    pub pszCode: *mut libc::c_char,
    pub pszValue: *mut libc::c_char,
}
#[test]
/// Verifies size, alignment and field offsets of `OGRCodedValue`
/// match the C layout bindgen recorded at generation time.
fn bindgen_test_layout_OGRCodedValue() {
    // Offset probe via MaybeUninit instead of `&(*null()).field`: taking a
    // field address through a null pointer is undefined behavior, and newer
    // bindgen versions emit exactly this MaybeUninit/addr_of! pattern.
    const UNINIT: ::std::mem::MaybeUninit<OGRCodedValue> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<OGRCodedValue>(),
        16usize,
        concat!("Size of: ", stringify!(OGRCodedValue))
    );
    assert_eq!(
        ::std::mem::align_of::<OGRCodedValue>(),
        8usize,
        concat!("Alignment of ", stringify!(OGRCodedValue))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pszCode) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRCodedValue),
            "::",
            stringify!(pszCode)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pszValue) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OGRCodedValue),
            "::",
            stringify!(pszValue)
        )
    );
}
/// Field domain kinds (coded values, numeric range, glob pattern).
pub mod OGRFieldDomainType {
    pub type Type = libc::c_uint;
    pub const OFDT_CODED: Type = 0;
    pub const OFDT_RANGE: Type = 1;
    pub const OFDT_GLOB: Type = 2;
}
/// Policy applied to a domain-constrained field when a feature is split.
pub mod OGRFieldDomainSplitPolicy {
    pub type Type = libc::c_uint;
    pub const OFDSP_DEFAULT_VALUE: Type = 0;
    pub const OFDSP_DUPLICATE: Type = 1;
    pub const OFDSP_GEOMETRY_RATIO: Type = 2;
}
/// Policy applied to a domain-constrained field when features are merged.
pub mod OGRFieldDomainMergePolicy {
    pub type Type = libc::c_uint;
    pub const OFDMP_DEFAULT_VALUE: Type = 0;
    pub const OFDMP_SUM: Type = 1;
    pub const OFDMP_GEOMETRY_WEIGHTED: Type = 2;
}
extern "C" {
    /// Returns a GDAL version/build info string for the given request key.
    pub fn GDALVersionInfo(arg1: *const libc::c_char) -> *const libc::c_char;
}
extern "C" {
    /// Checks the runtime GDAL library version against the requested one.
    pub fn GDALCheckVersion(
        nVersionMajor: libc::c_int,
        nVersionMinor: libc::c_int,
        pszCallingComponentName: *const libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    /// Reports the GEOS version GDAL was built against; returns a bool
    /// (note: unusual for a C binding — generated from a C++ `bool`).
    pub fn OGRGetGEOSVersion(
        pnMajor: *mut libc::c_int,
        pnMinor: *mut libc::c_int,
        pnPatch: *mut libc::c_int,
    ) -> bool;
}
// Opaque handles to OGR C++ objects; always manipulated through the
// OGR_* C functions below, never dereferenced from Rust.
pub type OGRGeometryH = *mut libc::c_void;
pub type OGRSpatialReferenceH = *mut libc::c_void;
pub type OGRCoordinateTransformationH = *mut libc::c_void;
// --- Geometry construction and conversion (GDAL ogr_api.h) -----------------
extern "C" {
    /// Creates a geometry from WKB bytes (int byte count; see the *Ex
    /// variant below for buffers larger than INT_MAX).
    pub fn OGR_G_CreateFromWkb(
        arg1: *const libc::c_void,
        arg2: OGRSpatialReferenceH,
        arg3: *mut OGRGeometryH,
        arg4: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Like `OGR_G_CreateFromWkb`, but takes a `usize` byte count.
    pub fn OGR_G_CreateFromWkbEx(
        arg1: *const libc::c_void,
        arg2: OGRSpatialReferenceH,
        arg3: *mut OGRGeometryH,
        arg4: usize,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Creates a geometry from a WKT string; the input pointer is advanced
    /// past the consumed text (hence `*mut *mut`).
    pub fn OGR_G_CreateFromWkt(
        arg1: *mut *mut libc::c_char,
        arg2: OGRSpatialReferenceH,
        arg3: *mut OGRGeometryH,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_CreateFromFgf(
        arg1: *const libc::c_void,
        arg2: OGRSpatialReferenceH,
        arg3: *mut OGRGeometryH,
        arg4: libc::c_int,
        arg5: *mut libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Frees a geometry previously created by one of the OGR_G_* creators.
    pub fn OGR_G_DestroyGeometry(arg1: OGRGeometryH);
}
extern "C" {
    pub fn OGR_G_CreateGeometry(arg1: OGRwkbGeometryType::Type) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_ApproximateArcAngles(
        dfCenterX: f64,
        dfCenterY: f64,
        dfZ: f64,
        dfPrimaryRadius: f64,
        dfSecondaryAxis: f64,
        dfRotation: f64,
        dfStartAngle: f64,
        dfEndAngle: f64,
        dfMaxAngleStepSizeDegrees: f64,
    ) -> OGRGeometryH;
}
// Forced geometry-type conversions; each returns a new geometry handle.
extern "C" {
    pub fn OGR_G_ForceToPolygon(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_ForceToLineString(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_ForceToMultiPolygon(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_ForceToMultiPoint(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_ForceToMultiLineString(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    /// Generic conversion to an arbitrary target geometry type.
    pub fn OGR_G_ForceTo(
        hGeom: OGRGeometryH,
        eTargetType: OGRwkbGeometryType::Type,
        papszOptions: *mut *mut libc::c_char,
    ) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_RemoveLowerDimensionSubGeoms(hGeom: OGRGeometryH) -> OGRGeometryH;
}
// --- Geometry dimension/flags, envelopes, and WKB/WKT serialization --------
extern "C" {
    pub fn OGR_G_GetDimension(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_GetCoordinateDimension(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_CoordinateDimension(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_SetCoordinateDimension(arg1: OGRGeometryH, arg2: libc::c_int);
}
extern "C" {
    pub fn OGR_G_Is3D(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_IsMeasured(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Set3D(arg1: OGRGeometryH, arg2: libc::c_int);
}
extern "C" {
    pub fn OGR_G_SetMeasured(arg1: OGRGeometryH, arg2: libc::c_int);
}
extern "C" {
    /// Deep-copies a geometry; caller owns the returned handle.
    pub fn OGR_G_Clone(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_GetEnvelope(arg1: OGRGeometryH, arg2: *mut OGREnvelope);
}
extern "C" {
    pub fn OGR_G_GetEnvelope3D(arg1: OGRGeometryH, arg2: *mut OGREnvelope3D);
}
extern "C" {
    pub fn OGR_G_ImportFromWkb(
        arg1: OGRGeometryH,
        arg2: *const libc::c_void,
        arg3: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Writes WKB into a caller-provided buffer; size it with
    /// `OGR_G_WkbSize`/`OGR_G_WkbSizeEx` first.
    pub fn OGR_G_ExportToWkb(
        arg1: OGRGeometryH,
        arg2: OGRwkbByteOrder::Type,
        arg3: *mut libc::c_uchar,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_ExportToIsoWkb(
        arg1: OGRGeometryH,
        arg2: OGRwkbByteOrder::Type,
        arg3: *mut libc::c_uchar,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_WkbSize(hGeom: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    /// `usize` variant of `OGR_G_WkbSize` for geometries over INT_MAX bytes.
    pub fn OGR_G_WkbSizeEx(hGeom: OGRGeometryH) -> usize;
}
extern "C" {
    pub fn OGR_G_ImportFromWkt(arg1: OGRGeometryH, arg2: *mut *mut libc::c_char) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_ExportToWkt(arg1: OGRGeometryH, arg2: *mut *mut libc::c_char) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_ExportToIsoWkt(arg1: OGRGeometryH, arg2: *mut *mut libc::c_char) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_GetGeometryType(arg1: OGRGeometryH) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_G_GetGeometryName(arg1: OGRGeometryH) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_G_DumpReadable(arg1: OGRGeometryH, arg2: *mut FILE, arg3: *const libc::c_char);
}
extern "C" {
    pub fn OGR_G_FlattenTo2D(arg1: OGRGeometryH);
}
extern "C" {
    pub fn OGR_G_CloseRings(arg1: OGRGeometryH);
}
// --- GML/KML/JSON import & export, spatial reference assignment ------------
extern "C" {
    pub fn OGR_G_CreateFromGML(arg1: *const libc::c_char) -> OGRGeometryH;
}
extern "C" {
    /// Returns a newly allocated GML string (free with CPLFree per GDAL
    /// conventions — TODO confirm against ogr_api.h docs).
    pub fn OGR_G_ExportToGML(arg1: OGRGeometryH) -> *mut libc::c_char;
}
extern "C" {
    pub fn OGR_G_ExportToGMLEx(
        arg1: OGRGeometryH,
        papszOptions: *mut *mut libc::c_char,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn OGR_G_CreateFromGMLTree(arg1: *const CPLXMLNode) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_ExportToGMLTree(arg1: OGRGeometryH) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn OGR_G_ExportEnvelopeToGMLTree(arg1: OGRGeometryH) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn OGR_G_ExportToKML(
        arg1: OGRGeometryH,
        pszAltitudeMode: *const libc::c_char,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn OGR_G_ExportToJson(arg1: OGRGeometryH) -> *mut libc::c_char;
}
extern "C" {
    pub fn OGR_G_ExportToJsonEx(
        arg1: OGRGeometryH,
        papszOptions: *mut *mut libc::c_char,
    ) -> *mut libc::c_char;
}
extern "C" {
    pub fn OGR_G_CreateGeometryFromJson(arg1: *const libc::c_char) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_CreateGeometryFromEsriJson(arg1: *const libc::c_char) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_AssignSpatialReference(arg1: OGRGeometryH, arg2: OGRSpatialReferenceH);
}
extern "C" {
    pub fn OGR_G_GetSpatialReference(arg1: OGRGeometryH) -> OGRSpatialReferenceH;
}
extern "C" {
    /// Transforms coordinates in place using an existing transformation.
    pub fn OGR_G_Transform(arg1: OGRGeometryH, arg2: OGRCoordinateTransformationH) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_TransformTo(arg1: OGRGeometryH, arg2: OGRSpatialReferenceH) -> OGRErr::Type;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
/// Opaque C++ geometry-transformer object; only used behind a pointer.
pub struct OGRGeomTransformer {
    _unused: [u8; 0],
}
pub type OGRGeomTransformerH = *mut OGRGeomTransformer;
extern "C" {
    pub fn OGR_GeomTransformer_Create(
        arg1: OGRCoordinateTransformationH,
        papszOptions: CSLConstList,
    ) -> OGRGeomTransformerH;
}
extern "C" {
    /// Returns a new transformed geometry; the input is not consumed.
    pub fn OGR_GeomTransformer_Transform(
        hTransformer: OGRGeomTransformerH,
        hGeom: OGRGeometryH,
    ) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_GeomTransformer_Destroy(hTransformer: OGRGeomTransformerH);
}
extern "C" {
    pub fn OGR_G_Simplify(hThis: OGRGeometryH, tolerance: f64) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_SimplifyPreserveTopology(hThis: OGRGeometryH, tolerance: f64) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_DelaunayTriangulation(
        hThis: OGRGeometryH,
        dfTolerance: f64,
        bOnlyEdges: libc::c_int,
    ) -> OGRGeometryH;
}
extern "C" {
    /// Densifies the geometry in place so no segment exceeds `dfMaxLength`.
    pub fn OGR_G_Segmentize(hGeom: OGRGeometryH, dfMaxLength: f64);
}
// --- Spatial predicates (return C int booleans) and set operations ---------
extern "C" {
    pub fn OGR_G_Intersects(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Equals(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Disjoint(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Touches(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Crosses(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Within(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Contains(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Overlaps(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
// Constructive operations: each returns a newly allocated geometry.
extern "C" {
    pub fn OGR_G_Boundary(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_ConvexHull(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_Buffer(arg1: OGRGeometryH, arg2: f64, arg3: libc::c_int) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_Intersection(arg1: OGRGeometryH, arg2: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_Union(arg1: OGRGeometryH, arg2: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_UnionCascaded(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_PointOnSurface(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_Difference(arg1: OGRGeometryH, arg2: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_SymDifference(arg1: OGRGeometryH, arg2: OGRGeometryH) -> OGRGeometryH;
}
// Scalar measurements.
extern "C" {
    pub fn OGR_G_Distance(arg1: OGRGeometryH, arg2: OGRGeometryH) -> f64;
}
extern "C" {
    pub fn OGR_G_Distance3D(arg1: OGRGeometryH, arg2: OGRGeometryH) -> f64;
}
extern "C" {
    pub fn OGR_G_Length(arg1: OGRGeometryH) -> f64;
}
extern "C" {
    pub fn OGR_G_Area(arg1: OGRGeometryH) -> f64;
}
extern "C" {
    /// Note: writes the centroid into the second (pre-created) geometry.
    pub fn OGR_G_Centroid(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Value(arg1: OGRGeometryH, dfDistance: f64) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_Empty(arg1: OGRGeometryH);
}
extern "C" {
    pub fn OGR_G_IsEmpty(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_IsValid(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_MakeValid(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_MakeValidEx(arg1: OGRGeometryH, arg2: CSLConstList) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_Normalize(arg1: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_IsSimple(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_IsRing(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Polygonize(arg1: OGRGeometryH) -> OGRGeometryH;
}
// Legacy aliases kept by the C API (e.g. Intersect/Equal/GetArea);
// prefer the modern names above.
extern "C" {
    pub fn OGR_G_Intersect(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_Equal(arg1: OGRGeometryH, arg2: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_SymmetricDifference(arg1: OGRGeometryH, arg2: OGRGeometryH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_GetArea(arg1: OGRGeometryH) -> f64;
}
extern "C" {
    pub fn OGR_G_GetBoundary(arg1: OGRGeometryH) -> OGRGeometryH;
}
// --- Point-level access: bulk getters/setters with byte strides ------------
extern "C" {
    pub fn OGR_G_GetPointCount(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    /// Bulk copy of XYZ coordinates; the `n*Stride` values are byte strides
    /// between consecutive coordinates in the caller's buffers.
    pub fn OGR_G_GetPoints(
        hGeom: OGRGeometryH,
        pabyX: *mut libc::c_void,
        nXStride: libc::c_int,
        pabyY: *mut libc::c_void,
        nYStride: libc::c_int,
        pabyZ: *mut libc::c_void,
        nZStride: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_GetPointsZM(
        hGeom: OGRGeometryH,
        pabyX: *mut libc::c_void,
        nXStride: libc::c_int,
        pabyY: *mut libc::c_void,
        nYStride: libc::c_int,
        pabyZ: *mut libc::c_void,
        nZStride: libc::c_int,
        pabyM: *mut libc::c_void,
        nMStride: libc::c_int,
    ) -> libc::c_int;
}
// Single-coordinate accessors, indexed by point number.
extern "C" {
    pub fn OGR_G_GetX(arg1: OGRGeometryH, arg2: libc::c_int) -> f64;
}
extern "C" {
    pub fn OGR_G_GetY(arg1: OGRGeometryH, arg2: libc::c_int) -> f64;
}
extern "C" {
    pub fn OGR_G_GetZ(arg1: OGRGeometryH, arg2: libc::c_int) -> f64;
}
extern "C" {
    pub fn OGR_G_GetM(arg1: OGRGeometryH, arg2: libc::c_int) -> f64;
}
extern "C" {
    pub fn OGR_G_GetPoint(
        arg1: OGRGeometryH,
        iPoint: libc::c_int,
        arg2: *mut f64,
        arg3: *mut f64,
        arg4: *mut f64,
    );
}
extern "C" {
    pub fn OGR_G_GetPointZM(
        arg1: OGRGeometryH,
        iPoint: libc::c_int,
        arg2: *mut f64,
        arg3: *mut f64,
        arg4: *mut f64,
        arg5: *mut f64,
    );
}
extern "C" {
    pub fn OGR_G_SetPointCount(hGeom: OGRGeometryH, nNewPointCount: libc::c_int);
}
extern "C" {
    pub fn OGR_G_SetPoint(arg1: OGRGeometryH, iPoint: libc::c_int, arg2: f64, arg3: f64, arg4: f64);
}
extern "C" {
    pub fn OGR_G_SetPoint_2D(arg1: OGRGeometryH, iPoint: libc::c_int, arg2: f64, arg3: f64);
}
extern "C" {
    pub fn OGR_G_SetPointM(
        arg1: OGRGeometryH,
        iPoint: libc::c_int,
        arg2: f64,
        arg3: f64,
        arg4: f64,
    );
}
extern "C" {
    pub fn OGR_G_SetPointZM(
        arg1: OGRGeometryH,
        iPoint: libc::c_int,
        arg2: f64,
        arg3: f64,
        arg4: f64,
        arg5: f64,
    );
}
extern "C" {
    pub fn OGR_G_AddPoint(arg1: OGRGeometryH, arg2: f64, arg3: f64, arg4: f64);
}
extern "C" {
    pub fn OGR_G_AddPoint_2D(arg1: OGRGeometryH, arg2: f64, arg3: f64);
}
extern "C" {
    pub fn OGR_G_AddPointM(arg1: OGRGeometryH, arg2: f64, arg3: f64, arg4: f64);
}
extern "C" {
    pub fn OGR_G_AddPointZM(arg1: OGRGeometryH, arg2: f64, arg3: f64, arg4: f64, arg5: f64);
}
extern "C" {
    /// Bulk assignment counterpart of `OGR_G_GetPoints` (byte strides).
    pub fn OGR_G_SetPoints(
        hGeom: OGRGeometryH,
        nPointsIn: libc::c_int,
        pabyX: *const libc::c_void,
        nXStride: libc::c_int,
        pabyY: *const libc::c_void,
        nYStride: libc::c_int,
        pabyZ: *const libc::c_void,
        nZStride: libc::c_int,
    );
}
extern "C" {
    pub fn OGR_G_SetPointsZM(
        hGeom: OGRGeometryH,
        nPointsIn: libc::c_int,
        pabyX: *const libc::c_void,
        nXStride: libc::c_int,
        pabyY: *const libc::c_void,
        nYStride: libc::c_int,
        pabyZ: *const libc::c_void,
        nZStride: libc::c_int,
        pabyM: *const libc::c_void,
        nMStride: libc::c_int,
    );
}
extern "C" {
    pub fn OGR_G_SwapXY(hGeom: OGRGeometryH);
}
// --- Sub-geometry containers, curve conversion, global flags ---------------
extern "C" {
    pub fn OGR_G_GetGeometryCount(arg1: OGRGeometryH) -> libc::c_int;
}
extern "C" {
    /// Returns an internal reference — do not destroy the result.
    pub fn OGR_G_GetGeometryRef(arg1: OGRGeometryH, arg2: libc::c_int) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_AddGeometry(arg1: OGRGeometryH, arg2: OGRGeometryH) -> OGRErr::Type;
}
extern "C" {
    /// "Directly" variant: ownership of the child handle is transferred.
    pub fn OGR_G_AddGeometryDirectly(arg1: OGRGeometryH, arg2: OGRGeometryH) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_RemoveGeometry(
        arg1: OGRGeometryH,
        arg2: libc::c_int,
        arg3: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_G_HasCurveGeometry(
        arg1: OGRGeometryH,
        bLookForNonLinear: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OGR_G_GetLinearGeometry(
        hGeom: OGRGeometryH,
        dfMaxAngleStepSizeDegrees: f64,
        papszOptions: *mut *mut libc::c_char,
    ) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_G_GetCurveGeometry(
        hGeom: OGRGeometryH,
        papszOptions: *mut *mut libc::c_char,
    ) -> OGRGeometryH;
}
extern "C" {
    pub fn OGRBuildPolygonFromEdges(
        hLinesAsCollection: OGRGeometryH,
        bBestEffort: libc::c_int,
        bAutoClose: libc::c_int,
        dfTolerance: f64,
        peErr: *mut OGRErr::Type,
    ) -> OGRGeometryH;
}
// Process-wide toggles (legacy DB2 byte order, non-linear geometry support).
extern "C" {
    pub fn OGRSetGenerate_DB2_V72_BYTE_ORDER(
        bGenerate_DB2_V72_BYTE_ORDER: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OGRGetGenerate_DB2_V72_BYTE_ORDER() -> libc::c_int;
}
extern "C" {
    pub fn OGRSetNonLinearGeometriesEnabledFlag(bFlag: libc::c_int);
}
extern "C" {
    pub fn OGRGetNonLinearGeometriesEnabledFlag() -> libc::c_int;
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
/// Opaque GEOS prepared-geometry wrapper; only used behind a pointer.
pub struct _OGRPreparedGeometry {
    _unused: [u8; 0],
}
pub type OGRPreparedGeometryH = *mut _OGRPreparedGeometry;
extern "C" {
    pub fn OGRHasPreparedGeometrySupport() -> libc::c_int;
}
extern "C" {
    pub fn OGRCreatePreparedGeometry(hGeom: OGRGeometryH) -> OGRPreparedGeometryH;
}
extern "C" {
    pub fn OGRDestroyPreparedGeometry(hPreparedGeom: OGRPreparedGeometryH);
}
extern "C" {
    pub fn OGRPreparedGeometryIntersects(
        hPreparedGeom: OGRPreparedGeometryH,
        hOtherGeom: OGRGeometryH,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OGRPreparedGeometryContains(
        hPreparedGeom: OGRPreparedGeometryH,
        hOtherGeom: OGRGeometryH,
    ) -> libc::c_int;
}
// Opaque handles for the feature/field-definition layer of the API.
pub type OGRFieldDefnH = *mut libc::c_void;
pub type OGRFeatureDefnH = *mut libc::c_void;
pub type OGRFeatureH = *mut libc::c_void;
pub type OGRStyleTableH = *mut libc::c_void;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRGeomFieldDefnHS {
    _unused: [u8; 0],
}
pub type OGRGeomFieldDefnH = *mut OGRGeomFieldDefnHS;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRFieldDomainHS {
    _unused: [u8; 0],
}
pub type OGRFieldDomainH = *mut OGRFieldDomainHS;
// --- OGRFieldDefn (attribute field definition) accessors --------------------
extern "C" {
    pub fn OGR_Fld_Create(arg1: *const libc::c_char, arg2: OGRFieldType::Type) -> OGRFieldDefnH;
}
extern "C" {
    pub fn OGR_Fld_Destroy(arg1: OGRFieldDefnH);
}
extern "C" {
    pub fn OGR_Fld_SetName(arg1: OGRFieldDefnH, arg2: *const libc::c_char);
}
extern "C" {
    /// "Ref" suffix: returns an internal string — do not free.
    pub fn OGR_Fld_GetNameRef(arg1: OGRFieldDefnH) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_Fld_SetAlternativeName(arg1: OGRFieldDefnH, arg2: *const libc::c_char);
}
extern "C" {
    pub fn OGR_Fld_GetAlternativeNameRef(arg1: OGRFieldDefnH) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_Fld_GetType(arg1: OGRFieldDefnH) -> OGRFieldType::Type;
}
extern "C" {
    pub fn OGR_Fld_SetType(arg1: OGRFieldDefnH, arg2: OGRFieldType::Type);
}
extern "C" {
    pub fn OGR_Fld_GetSubType(arg1: OGRFieldDefnH) -> OGRFieldSubType::Type;
}
extern "C" {
    pub fn OGR_Fld_SetSubType(arg1: OGRFieldDefnH, arg2: OGRFieldSubType::Type);
}
extern "C" {
    pub fn OGR_Fld_GetJustify(arg1: OGRFieldDefnH) -> OGRJustification::Type;
}
extern "C" {
    pub fn OGR_Fld_SetJustify(arg1: OGRFieldDefnH, arg2: OGRJustification::Type);
}
extern "C" {
    pub fn OGR_Fld_GetWidth(arg1: OGRFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_Fld_SetWidth(arg1: OGRFieldDefnH, arg2: libc::c_int);
}
extern "C" {
    pub fn OGR_Fld_GetPrecision(arg1: OGRFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_Fld_SetPrecision(arg1: OGRFieldDefnH, arg2: libc::c_int);
}
extern "C" {
    /// Sets name/type/width/precision/justification in one call.
    pub fn OGR_Fld_Set(
        arg1: OGRFieldDefnH,
        arg2: *const libc::c_char,
        arg3: OGRFieldType::Type,
        arg4: libc::c_int,
        arg5: libc::c_int,
        arg6: OGRJustification::Type,
    );
}
extern "C" {
    pub fn OGR_Fld_IsIgnored(hDefn: OGRFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_Fld_SetIgnored(hDefn: OGRFieldDefnH, arg1: libc::c_int);
}
extern "C" {
    pub fn OGR_Fld_IsNullable(hDefn: OGRFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_Fld_SetNullable(hDefn: OGRFieldDefnH, arg1: libc::c_int);
}
extern "C" {
    pub fn OGR_Fld_IsUnique(hDefn: OGRFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_Fld_SetUnique(hDefn: OGRFieldDefnH, arg1: libc::c_int);
}
extern "C" {
    pub fn OGR_Fld_GetDefault(hDefn: OGRFieldDefnH) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_Fld_SetDefault(hDefn: OGRFieldDefnH, arg1: *const libc::c_char);
}
extern "C" {
    pub fn OGR_Fld_IsDefaultDriverSpecific(hDefn: OGRFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_Fld_GetDomainName(hDefn: OGRFieldDefnH) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_Fld_SetDomainName(hDefn: OGRFieldDefnH, arg1: *const libc::c_char);
}
extern "C" {
    pub fn OGR_GetFieldTypeName(arg1: OGRFieldType::Type) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_GetFieldSubTypeName(arg1: OGRFieldSubType::Type) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_AreTypeSubTypeCompatible(
        eType: OGRFieldType::Type,
        eSubType: OGRFieldSubType::Type,
    ) -> libc::c_int;
}
// --- OGRGeomFieldDefn (geometry field definition) accessors -----------------
extern "C" {
    pub fn OGR_GFld_Create(
        arg1: *const libc::c_char,
        arg2: OGRwkbGeometryType::Type,
    ) -> OGRGeomFieldDefnH;
}
extern "C" {
    pub fn OGR_GFld_Destroy(arg1: OGRGeomFieldDefnH);
}
extern "C" {
    pub fn OGR_GFld_SetName(arg1: OGRGeomFieldDefnH, arg2: *const libc::c_char);
}
extern "C" {
    pub fn OGR_GFld_GetNameRef(arg1: OGRGeomFieldDefnH) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_GFld_GetType(arg1: OGRGeomFieldDefnH) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_GFld_SetType(arg1: OGRGeomFieldDefnH, arg2: OGRwkbGeometryType::Type);
}
extern "C" {
    pub fn OGR_GFld_GetSpatialRef(arg1: OGRGeomFieldDefnH) -> OGRSpatialReferenceH;
}
extern "C" {
    pub fn OGR_GFld_SetSpatialRef(arg1: OGRGeomFieldDefnH, hSRS: OGRSpatialReferenceH);
}
extern "C" {
    pub fn OGR_GFld_IsNullable(hDefn: OGRGeomFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GFld_SetNullable(hDefn: OGRGeomFieldDefnH, arg1: libc::c_int);
}
extern "C" {
    pub fn OGR_GFld_IsIgnored(hDefn: OGRGeomFieldDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_GFld_SetIgnored(hDefn: OGRGeomFieldDefnH, arg1: libc::c_int);
}
// --- OGRFeatureDefn (feature schema) accessors -------------------------------
extern "C" {
    pub fn OGR_FD_Create(arg1: *const libc::c_char) -> OGRFeatureDefnH;
}
extern "C" {
    pub fn OGR_FD_Destroy(arg1: OGRFeatureDefnH);
}
extern "C" {
    /// Reference-counted drop, unlike the unconditional `OGR_FD_Destroy`.
    pub fn OGR_FD_Release(arg1: OGRFeatureDefnH);
}
extern "C" {
    pub fn OGR_FD_GetName(arg1: OGRFeatureDefnH) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_FD_GetFieldCount(arg1: OGRFeatureDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_GetFieldDefn(arg1: OGRFeatureDefnH, arg2: libc::c_int) -> OGRFieldDefnH;
}
extern "C" {
    /// Returns the field index by name; -1 semantics per the GDAL C API — TODO confirm.
    pub fn OGR_FD_GetFieldIndex(arg1: OGRFeatureDefnH, arg2: *const libc::c_char) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_AddFieldDefn(arg1: OGRFeatureDefnH, arg2: OGRFieldDefnH);
}
extern "C" {
    pub fn OGR_FD_DeleteFieldDefn(hDefn: OGRFeatureDefnH, iField: libc::c_int) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_FD_ReorderFieldDefns(
        hDefn: OGRFeatureDefnH,
        panMap: *const libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_FD_GetGeomType(arg1: OGRFeatureDefnH) -> OGRwkbGeometryType::Type;
}
extern "C" {
    pub fn OGR_FD_SetGeomType(arg1: OGRFeatureDefnH, arg2: OGRwkbGeometryType::Type);
}
extern "C" {
    pub fn OGR_FD_IsGeometryIgnored(arg1: OGRFeatureDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_SetGeometryIgnored(arg1: OGRFeatureDefnH, arg2: libc::c_int);
}
extern "C" {
    pub fn OGR_FD_IsStyleIgnored(arg1: OGRFeatureDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_SetStyleIgnored(arg1: OGRFeatureDefnH, arg2: libc::c_int);
}
extern "C" {
    pub fn OGR_FD_Reference(arg1: OGRFeatureDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_Dereference(arg1: OGRFeatureDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_GetReferenceCount(arg1: OGRFeatureDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_GetGeomFieldCount(hFDefn: OGRFeatureDefnH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_GetGeomFieldDefn(hFDefn: OGRFeatureDefnH, i: libc::c_int) -> OGRGeomFieldDefnH;
}
extern "C" {
    pub fn OGR_FD_GetGeomFieldIndex(
        hFDefn: OGRFeatureDefnH,
        pszName: *const libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OGR_FD_AddGeomFieldDefn(hFDefn: OGRFeatureDefnH, hGFldDefn: OGRGeomFieldDefnH);
}
extern "C" {
    pub fn OGR_FD_DeleteGeomFieldDefn(
        hFDefn: OGRFeatureDefnH,
        iGeomField: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_FD_IsSame(hFDefn: OGRFeatureDefnH, hOtherFDefn: OGRFeatureDefnH) -> libc::c_int;
}
// --- OGRFeature lifecycle, geometry attachment, raw field state -------------
extern "C" {
    pub fn OGR_F_Create(arg1: OGRFeatureDefnH) -> OGRFeatureH;
}
extern "C" {
    pub fn OGR_F_Destroy(arg1: OGRFeatureH);
}
extern "C" {
    pub fn OGR_F_GetDefnRef(arg1: OGRFeatureH) -> OGRFeatureDefnH;
}
extern "C" {
    /// "Directly" variant: ownership of the geometry handle is transferred.
    pub fn OGR_F_SetGeometryDirectly(arg1: OGRFeatureH, arg2: OGRGeometryH) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_F_SetGeometry(arg1: OGRFeatureH, arg2: OGRGeometryH) -> OGRErr::Type;
}
extern "C" {
    pub fn OGR_F_GetGeometryRef(arg1: OGRFeatureH) -> OGRGeometryH;
}
extern "C" {
    /// Takes ownership of the feature's geometry away from the feature.
    pub fn OGR_F_StealGeometry(arg1: OGRFeatureH) -> OGRGeometryH;
}
extern "C" {
    pub fn OGR_F_Clone(arg1: OGRFeatureH) -> OGRFeatureH;
}
extern "C" {
    pub fn OGR_F_Equal(arg1: OGRFeatureH, arg2: OGRFeatureH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_F_GetFieldCount(arg1: OGRFeatureH) -> libc::c_int;
}
extern "C" {
    pub fn OGR_F_GetFieldDefnRef(arg1: OGRFeatureH, arg2: libc::c_int) -> OGRFieldDefnH;
}
extern "C" {
    pub fn OGR_F_GetFieldIndex(arg1: OGRFeatureH, arg2: *const libc::c_char) -> libc::c_int;
}
// Set/unset/null state of individual fields (set and null are distinct
// states in OGR).
extern "C" {
    pub fn OGR_F_IsFieldSet(arg1: OGRFeatureH, arg2: libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn OGR_F_UnsetField(arg1: OGRFeatureH, arg2: libc::c_int);
}
extern "C" {
    pub fn OGR_F_IsFieldNull(arg1: OGRFeatureH, arg2: libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn OGR_F_IsFieldSetAndNotNull(arg1: OGRFeatureH, arg2: libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn OGR_F_SetFieldNull(arg1: OGRFeatureH, arg2: libc::c_int);
}
extern "C" {
    pub fn OGR_F_GetRawFieldRef(arg1: OGRFeatureH, arg2: libc::c_int) -> *mut OGRField;
}
extern "C" {
    pub fn OGR_RawField_IsUnset(arg1: *const OGRField) -> libc::c_int;
}
extern "C" {
    pub fn OGR_RawField_IsNull(arg1: *const OGRField) -> libc::c_int;
}
extern "C" {
    pub fn OGR_RawField_SetUnset(arg1: *mut OGRField);
}
extern "C" {
    pub fn OGR_RawField_SetNull(arg1: *mut OGRField);
}
// --- Typed field getters; list variants return internal buffers and write
// --- the element count through the out-parameter ---------------------------
extern "C" {
    pub fn OGR_F_GetFieldAsInteger(arg1: OGRFeatureH, arg2: libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn OGR_F_GetFieldAsInteger64(arg1: OGRFeatureH, arg2: libc::c_int) -> GIntBig;
}
extern "C" {
    pub fn OGR_F_GetFieldAsDouble(arg1: OGRFeatureH, arg2: libc::c_int) -> f64;
}
extern "C" {
    pub fn OGR_F_GetFieldAsString(arg1: OGRFeatureH, arg2: libc::c_int) -> *const libc::c_char;
}
extern "C" {
    pub fn OGR_F_GetFieldAsIntegerList(
        arg1: OGRFeatureH,
        arg2: libc::c_int,
        arg3: *mut libc::c_int,
    ) -> *const libc::c_int;
}
extern "C" {
    pub fn OGR_F_GetFieldAsInteger64List(
        arg1: OGRFeatureH,
        arg2: libc::c_int,
        arg3: *mut libc::c_int,
    ) -> *const GIntBig;
}
extern "C" {
    pub fn OGR_F_GetFieldAsDoubleList(
        arg1: OGRFeatureH,
        arg2: libc::c_int,
        arg3: *mut libc::c_int,
    ) -> *const f64;
}
extern "C" {
    pub fn OGR_F_GetFieldAsStringList(
        arg1: OGRFeatureH,
        arg2: libc::c_int,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn OGR_F_GetFieldAsBinary(
        arg1: OGRFeatureH,
        arg2: libc::c_int,
        arg3: *mut libc::c_int,
    ) -> *mut GByte;
}
extern "C" {
    /// Date/time getter with integer seconds; out-params are
    /// year/month/day/hour/minute/second/tzflag in order — TODO confirm.
    pub fn OGR_F_GetFieldAsDateTime(
        arg1: OGRFeatureH,
        arg2: libc::c_int,
        arg3: *mut libc::c_int,
        arg4: *mut libc::c_int,
        arg5: *mut libc::c_int,
        arg6: *mut libc::c_int,
        arg7: *mut libc::c_int,
        arg8: *mut libc::c_int,
        arg9: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    /// Extended variant with fractional seconds (`pfSecond: f32`).
    pub fn OGR_F_GetFieldAsDateTimeEx(
        hFeat: OGRFeatureH,
        iField: libc::c_int,
        pnYear: *mut libc::c_int,
        pnMonth: *mut libc::c_int,
        pnDay: *mut libc::c_int,
        pnHour: *mut libc::c_int,
        pnMinute: *mut libc::c_int,
        pfSecond: *mut f32,
        pnTZFlag: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
pub fn OGR_F_SetFieldInteger(arg1: OGRFeatureH, arg2: libc::c_int, arg3: libc::c_int);
}
extern "C" {
pub fn OGR_F_SetFieldInteger64(arg1: OGRFeatureH, arg2: libc::c_int, arg3: GIntBig);
}
extern "C" {
pub fn OGR_F_SetFieldDouble(arg1: OGRFeatureH, arg2: libc::c_int, arg3: f64);
}
extern "C" {
pub fn OGR_F_SetFieldString(arg1: OGRFeatureH, arg2: libc::c_int, arg3: *const libc::c_char);
}
extern "C" {
pub fn OGR_F_SetFieldIntegerList(
arg1: OGRFeatureH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: *const libc::c_int,
);
}
extern "C" {
pub fn OGR_F_SetFieldInteger64List(
arg1: OGRFeatureH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: *const GIntBig,
);
}
extern "C" {
pub fn OGR_F_SetFieldDoubleList(
arg1: OGRFeatureH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: *const f64,
);
}
extern "C" {
pub fn OGR_F_SetFieldStringList(arg1: OGRFeatureH, arg2: libc::c_int, arg3: CSLConstList);
}
extern "C" {
pub fn OGR_F_SetFieldRaw(arg1: OGRFeatureH, arg2: libc::c_int, arg3: *mut OGRField);
}
extern "C" {
pub fn OGR_F_SetFieldBinary(
arg1: OGRFeatureH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: *const libc::c_void,
);
}
extern "C" {
pub fn OGR_F_SetFieldDateTime(
arg1: OGRFeatureH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
arg5: libc::c_int,
arg6: libc::c_int,
arg7: libc::c_int,
arg8: libc::c_int,
arg9: libc::c_int,
);
}
extern "C" {
pub fn OGR_F_SetFieldDateTimeEx(
arg1: OGRFeatureH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
arg5: libc::c_int,
arg6: libc::c_int,
arg7: libc::c_int,
arg8: f32,
arg9: libc::c_int,
);
}
extern "C" {
pub fn OGR_F_GetGeomFieldCount(hFeat: OGRFeatureH) -> libc::c_int;
}
extern "C" {
pub fn OGR_F_GetGeomFieldDefnRef(hFeat: OGRFeatureH, iField: libc::c_int) -> OGRGeomFieldDefnH;
}
extern "C" {
pub fn OGR_F_GetGeomFieldIndex(hFeat: OGRFeatureH, pszName: *const libc::c_char)
-> libc::c_int;
}
extern "C" {
pub fn OGR_F_GetGeomFieldRef(hFeat: OGRFeatureH, iField: libc::c_int) -> OGRGeometryH;
}
extern "C" {
pub fn OGR_F_SetGeomFieldDirectly(
hFeat: OGRFeatureH,
iField: libc::c_int,
hGeom: OGRGeometryH,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_F_SetGeomField(
hFeat: OGRFeatureH,
iField: libc::c_int,
hGeom: OGRGeometryH,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_F_GetFID(arg1: OGRFeatureH) -> GIntBig;
}
extern "C" {
pub fn OGR_F_SetFID(arg1: OGRFeatureH, arg2: GIntBig) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_F_DumpReadable(arg1: OGRFeatureH, arg2: *mut FILE);
}
extern "C" {
pub fn OGR_F_SetFrom(arg1: OGRFeatureH, arg2: OGRFeatureH, arg3: libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_F_SetFromWithMap(
arg1: OGRFeatureH,
arg2: OGRFeatureH,
arg3: libc::c_int,
arg4: *const libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_F_GetStyleString(arg1: OGRFeatureH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_F_SetStyleString(arg1: OGRFeatureH, arg2: *const libc::c_char);
}
extern "C" {
pub fn OGR_F_SetStyleStringDirectly(arg1: OGRFeatureH, arg2: *mut libc::c_char);
}
extern "C" {
pub fn OGR_F_GetStyleTable(arg1: OGRFeatureH) -> OGRStyleTableH;
}
extern "C" {
pub fn OGR_F_SetStyleTableDirectly(arg1: OGRFeatureH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn OGR_F_SetStyleTable(arg1: OGRFeatureH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn OGR_F_GetNativeData(arg1: OGRFeatureH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_F_SetNativeData(arg1: OGRFeatureH, arg2: *const libc::c_char);
}
extern "C" {
pub fn OGR_F_GetNativeMediaType(arg1: OGRFeatureH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_F_SetNativeMediaType(arg1: OGRFeatureH, arg2: *const libc::c_char);
}
extern "C" {
pub fn OGR_F_FillUnsetWithDefault(
hFeat: OGRFeatureH,
bNotNullableOnly: libc::c_int,
papszOptions: *mut *mut libc::c_char,
);
}
extern "C" {
pub fn OGR_F_Validate(
arg1: OGRFeatureH,
nValidateFlags: libc::c_int,
bEmitError: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_FldDomain_Destroy(arg1: OGRFieldDomainH);
}
extern "C" {
pub fn OGR_FldDomain_GetName(arg1: OGRFieldDomainH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_FldDomain_GetDescription(arg1: OGRFieldDomainH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_FldDomain_GetDomainType(arg1: OGRFieldDomainH) -> OGRFieldDomainType::Type;
}
extern "C" {
pub fn OGR_FldDomain_GetFieldType(arg1: OGRFieldDomainH) -> OGRFieldType::Type;
}
extern "C" {
pub fn OGR_FldDomain_GetFieldSubType(arg1: OGRFieldDomainH) -> OGRFieldSubType::Type;
}
extern "C" {
pub fn OGR_FldDomain_GetSplitPolicy(arg1: OGRFieldDomainH) -> OGRFieldDomainSplitPolicy::Type;
}
extern "C" {
pub fn OGR_FldDomain_SetSplitPolicy(
arg1: OGRFieldDomainH,
arg2: OGRFieldDomainSplitPolicy::Type,
);
}
extern "C" {
pub fn OGR_FldDomain_GetMergePolicy(arg1: OGRFieldDomainH) -> OGRFieldDomainMergePolicy::Type;
}
extern "C" {
pub fn OGR_FldDomain_SetMergePolicy(
arg1: OGRFieldDomainH,
arg2: OGRFieldDomainMergePolicy::Type,
);
}
extern "C" {
pub fn OGR_CodedFldDomain_Create(
pszName: *const libc::c_char,
pszDescription: *const libc::c_char,
eFieldType: OGRFieldType::Type,
eFieldSubType: OGRFieldSubType::Type,
enumeration: *const OGRCodedValue,
) -> OGRFieldDomainH;
}
extern "C" {
pub fn OGR_CodedFldDomain_GetEnumeration(arg1: OGRFieldDomainH) -> *const OGRCodedValue;
}
extern "C" {
pub fn OGR_RangeFldDomain_Create(
pszName: *const libc::c_char,
pszDescription: *const libc::c_char,
eFieldType: OGRFieldType::Type,
eFieldSubType: OGRFieldSubType::Type,
psMin: *const OGRField,
bMinIsInclusive: bool,
psMax: *const OGRField,
bMaxIsInclusive: bool,
) -> OGRFieldDomainH;
}
extern "C" {
pub fn OGR_RangeFldDomain_GetMin(
arg1: OGRFieldDomainH,
pbIsInclusiveOut: *mut bool,
) -> *const OGRField;
}
extern "C" {
pub fn OGR_RangeFldDomain_GetMax(
arg1: OGRFieldDomainH,
pbIsInclusiveOut: *mut bool,
) -> *const OGRField;
}
extern "C" {
pub fn OGR_GlobFldDomain_Create(
pszName: *const libc::c_char,
pszDescription: *const libc::c_char,
eFieldType: OGRFieldType::Type,
eFieldSubType: OGRFieldSubType::Type,
pszGlob: *const libc::c_char,
) -> OGRFieldDomainH;
}
extern "C" {
pub fn OGR_GlobFldDomain_GetGlob(arg1: OGRFieldDomainH) -> *const libc::c_char;
}
pub type OGRLayerH = *mut libc::c_void;
pub type OGRDataSourceH = *mut libc::c_void;
pub type OGRSFDriverH = *mut libc::c_void;
extern "C" {
pub fn OGR_L_GetName(arg1: OGRLayerH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_L_GetGeomType(arg1: OGRLayerH) -> OGRwkbGeometryType::Type;
}
extern "C" {
pub fn OGR_L_GetSpatialFilter(arg1: OGRLayerH) -> OGRGeometryH;
}
extern "C" {
pub fn OGR_L_SetSpatialFilter(arg1: OGRLayerH, arg2: OGRGeometryH);
}
extern "C" {
pub fn OGR_L_SetSpatialFilterRect(arg1: OGRLayerH, arg2: f64, arg3: f64, arg4: f64, arg5: f64);
}
extern "C" {
pub fn OGR_L_SetSpatialFilterEx(arg1: OGRLayerH, iGeomField: libc::c_int, hGeom: OGRGeometryH);
}
extern "C" {
pub fn OGR_L_SetSpatialFilterRectEx(
arg1: OGRLayerH,
iGeomField: libc::c_int,
dfMinX: f64,
dfMinY: f64,
dfMaxX: f64,
dfMaxY: f64,
);
}
extern "C" {
pub fn OGR_L_SetAttributeFilter(arg1: OGRLayerH, arg2: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_ResetReading(arg1: OGRLayerH);
}
extern "C" {
pub fn OGR_L_GetNextFeature(arg1: OGRLayerH) -> OGRFeatureH;
}
extern "C" {
pub fn OGR_L_SetNextByIndex(arg1: OGRLayerH, arg2: GIntBig) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_GetFeature(arg1: OGRLayerH, arg2: GIntBig) -> OGRFeatureH;
}
extern "C" {
pub fn OGR_L_SetFeature(arg1: OGRLayerH, arg2: OGRFeatureH) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_CreateFeature(arg1: OGRLayerH, arg2: OGRFeatureH) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_DeleteFeature(arg1: OGRLayerH, arg2: GIntBig) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_GetLayerDefn(arg1: OGRLayerH) -> OGRFeatureDefnH;
}
extern "C" {
pub fn OGR_L_GetSpatialRef(arg1: OGRLayerH) -> OGRSpatialReferenceH;
}
extern "C" {
pub fn OGR_L_FindFieldIndex(
arg1: OGRLayerH,
arg2: *const libc::c_char,
bExactMatch: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_L_GetFeatureCount(arg1: OGRLayerH, arg2: libc::c_int) -> GIntBig;
}
extern "C" {
pub fn OGR_L_GetExtent(
arg1: OGRLayerH,
arg2: *mut OGREnvelope,
arg3: libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_GetExtentEx(
arg1: OGRLayerH,
iGeomField: libc::c_int,
psExtent: *mut OGREnvelope,
bForce: libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_TestCapability(arg1: OGRLayerH, arg2: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn OGR_L_CreateField(
arg1: OGRLayerH,
arg2: OGRFieldDefnH,
arg3: libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_CreateGeomField(
hLayer: OGRLayerH,
hFieldDefn: OGRGeomFieldDefnH,
bForce: libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_DeleteField(arg1: OGRLayerH, iField: libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_ReorderFields(arg1: OGRLayerH, panMap: *mut libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_ReorderField(
arg1: OGRLayerH,
iOldFieldPos: libc::c_int,
iNewFieldPos: libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_AlterFieldDefn(
arg1: OGRLayerH,
iField: libc::c_int,
hNewFieldDefn: OGRFieldDefnH,
nFlags: libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_StartTransaction(arg1: OGRLayerH) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_CommitTransaction(arg1: OGRLayerH) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_RollbackTransaction(arg1: OGRLayerH) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_Reference(arg1: OGRLayerH) -> libc::c_int;
}
extern "C" {
pub fn OGR_L_Dereference(arg1: OGRLayerH) -> libc::c_int;
}
extern "C" {
pub fn OGR_L_GetRefCount(arg1: OGRLayerH) -> libc::c_int;
}
extern "C" {
pub fn OGR_L_SyncToDisk(arg1: OGRLayerH) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_GetFeaturesRead(arg1: OGRLayerH) -> GIntBig;
}
extern "C" {
pub fn OGR_L_GetFIDColumn(arg1: OGRLayerH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_L_GetGeometryColumn(arg1: OGRLayerH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_L_GetStyleTable(arg1: OGRLayerH) -> OGRStyleTableH;
}
extern "C" {
pub fn OGR_L_SetStyleTableDirectly(arg1: OGRLayerH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn OGR_L_SetStyleTable(arg1: OGRLayerH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn OGR_L_SetIgnoredFields(arg1: OGRLayerH, arg2: *mut *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_Intersection(
arg1: OGRLayerH,
arg2: OGRLayerH,
arg3: OGRLayerH,
arg4: *mut *mut libc::c_char,
arg5: GDALProgressFunc,
arg6: *mut libc::c_void,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_Union(
arg1: OGRLayerH,
arg2: OGRLayerH,
arg3: OGRLayerH,
arg4: *mut *mut libc::c_char,
arg5: GDALProgressFunc,
arg6: *mut libc::c_void,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_SymDifference(
arg1: OGRLayerH,
arg2: OGRLayerH,
arg3: OGRLayerH,
arg4: *mut *mut libc::c_char,
arg5: GDALProgressFunc,
arg6: *mut libc::c_void,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_Identity(
arg1: OGRLayerH,
arg2: OGRLayerH,
arg3: OGRLayerH,
arg4: *mut *mut libc::c_char,
arg5: GDALProgressFunc,
arg6: *mut libc::c_void,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_Update(
arg1: OGRLayerH,
arg2: OGRLayerH,
arg3: OGRLayerH,
arg4: *mut *mut libc::c_char,
arg5: GDALProgressFunc,
arg6: *mut libc::c_void,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_Clip(
arg1: OGRLayerH,
arg2: OGRLayerH,
arg3: OGRLayerH,
arg4: *mut *mut libc::c_char,
arg5: GDALProgressFunc,
arg6: *mut libc::c_void,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_L_Erase(
arg1: OGRLayerH,
arg2: OGRLayerH,
arg3: OGRLayerH,
arg4: *mut *mut libc::c_char,
arg5: GDALProgressFunc,
arg6: *mut libc::c_void,
) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_DS_Destroy(arg1: OGRDataSourceH);
}
extern "C" {
pub fn OGR_DS_GetName(arg1: OGRDataSourceH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_DS_GetLayerCount(arg1: OGRDataSourceH) -> libc::c_int;
}
extern "C" {
pub fn OGR_DS_GetLayer(arg1: OGRDataSourceH, arg2: libc::c_int) -> OGRLayerH;
}
extern "C" {
pub fn OGR_DS_GetLayerByName(arg1: OGRDataSourceH, arg2: *const libc::c_char) -> OGRLayerH;
}
extern "C" {
pub fn OGR_DS_DeleteLayer(arg1: OGRDataSourceH, arg2: libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_DS_GetDriver(arg1: OGRDataSourceH) -> OGRSFDriverH;
}
extern "C" {
pub fn OGR_DS_CreateLayer(
arg1: OGRDataSourceH,
arg2: *const libc::c_char,
arg3: OGRSpatialReferenceH,
arg4: OGRwkbGeometryType::Type,
arg5: *mut *mut libc::c_char,
) -> OGRLayerH;
}
extern "C" {
pub fn OGR_DS_CopyLayer(
arg1: OGRDataSourceH,
arg2: OGRLayerH,
arg3: *const libc::c_char,
arg4: *mut *mut libc::c_char,
) -> OGRLayerH;
}
extern "C" {
pub fn OGR_DS_TestCapability(arg1: OGRDataSourceH, arg2: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn OGR_DS_ExecuteSQL(
arg1: OGRDataSourceH,
arg2: *const libc::c_char,
arg3: OGRGeometryH,
arg4: *const libc::c_char,
) -> OGRLayerH;
}
extern "C" {
pub fn OGR_DS_ReleaseResultSet(arg1: OGRDataSourceH, arg2: OGRLayerH);
}
extern "C" {
pub fn OGR_DS_Reference(arg1: OGRDataSourceH) -> libc::c_int;
}
extern "C" {
pub fn OGR_DS_Dereference(arg1: OGRDataSourceH) -> libc::c_int;
}
extern "C" {
pub fn OGR_DS_GetRefCount(arg1: OGRDataSourceH) -> libc::c_int;
}
extern "C" {
pub fn OGR_DS_GetSummaryRefCount(arg1: OGRDataSourceH) -> libc::c_int;
}
extern "C" {
pub fn OGR_DS_SyncToDisk(arg1: OGRDataSourceH) -> OGRErr::Type;
}
extern "C" {
pub fn OGR_DS_GetStyleTable(arg1: OGRDataSourceH) -> OGRStyleTableH;
}
extern "C" {
pub fn OGR_DS_SetStyleTableDirectly(arg1: OGRDataSourceH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn OGR_DS_SetStyleTable(arg1: OGRDataSourceH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn OGR_Dr_GetName(arg1: OGRSFDriverH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_Dr_Open(
arg1: OGRSFDriverH,
arg2: *const libc::c_char,
arg3: libc::c_int,
) -> OGRDataSourceH;
}
extern "C" {
pub fn OGR_Dr_TestCapability(arg1: OGRSFDriverH, arg2: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn OGR_Dr_CreateDataSource(
arg1: OGRSFDriverH,
arg2: *const libc::c_char,
arg3: *mut *mut libc::c_char,
) -> OGRDataSourceH;
}
extern "C" {
pub fn OGR_Dr_CopyDataSource(
arg1: OGRSFDriverH,
arg2: OGRDataSourceH,
arg3: *const libc::c_char,
arg4: *mut *mut libc::c_char,
) -> OGRDataSourceH;
}
extern "C" {
pub fn OGR_Dr_DeleteDataSource(arg1: OGRSFDriverH, arg2: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OGROpen(
arg1: *const libc::c_char,
arg2: libc::c_int,
arg3: *mut OGRSFDriverH,
) -> OGRDataSourceH;
}
extern "C" {
pub fn OGROpenShared(
arg1: *const libc::c_char,
arg2: libc::c_int,
arg3: *mut OGRSFDriverH,
) -> OGRDataSourceH;
}
extern "C" {
pub fn OGRReleaseDataSource(arg1: OGRDataSourceH) -> OGRErr::Type;
}
extern "C" {
pub fn OGRRegisterDriver(arg1: OGRSFDriverH);
}
extern "C" {
pub fn OGRDeregisterDriver(arg1: OGRSFDriverH);
}
extern "C" {
pub fn OGRGetDriverCount() -> libc::c_int;
}
extern "C" {
pub fn OGRGetDriver(arg1: libc::c_int) -> OGRSFDriverH;
}
extern "C" {
pub fn OGRGetDriverByName(arg1: *const libc::c_char) -> OGRSFDriverH;
}
extern "C" {
pub fn OGRGetOpenDSCount() -> libc::c_int;
}
extern "C" {
pub fn OGRGetOpenDS(iDS: libc::c_int) -> OGRDataSourceH;
}
extern "C" {
pub fn OGRRegisterAll();
}
extern "C" {
pub fn OGRCleanupAll();
}
pub type OGRStyleMgrH = *mut libc::c_void;
pub type OGRStyleToolH = *mut libc::c_void;
extern "C" {
pub fn OGR_SM_Create(hStyleTable: OGRStyleTableH) -> OGRStyleMgrH;
}
extern "C" {
pub fn OGR_SM_Destroy(hSM: OGRStyleMgrH);
}
extern "C" {
pub fn OGR_SM_InitFromFeature(hSM: OGRStyleMgrH, hFeat: OGRFeatureH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_SM_InitStyleString(
hSM: OGRStyleMgrH,
pszStyleString: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_SM_GetPartCount(
hSM: OGRStyleMgrH,
pszStyleString: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_SM_GetPart(
hSM: OGRStyleMgrH,
nPartId: libc::c_int,
pszStyleString: *const libc::c_char,
) -> OGRStyleToolH;
}
extern "C" {
pub fn OGR_SM_AddPart(hSM: OGRStyleMgrH, hST: OGRStyleToolH) -> libc::c_int;
}
extern "C" {
pub fn OGR_SM_AddStyle(
hSM: OGRStyleMgrH,
pszStyleName: *const libc::c_char,
pszStyleString: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_ST_Create(eClassId: OGRSTClassId) -> OGRStyleToolH;
}
extern "C" {
pub fn OGR_ST_Destroy(hST: OGRStyleToolH);
}
extern "C" {
pub fn OGR_ST_GetType(hST: OGRStyleToolH) -> OGRSTClassId;
}
extern "C" {
pub fn OGR_ST_GetUnit(hST: OGRStyleToolH) -> OGRSTUnitId;
}
extern "C" {
pub fn OGR_ST_SetUnit(hST: OGRStyleToolH, eUnit: OGRSTUnitId, dfGroundPaperScale: f64);
}
extern "C" {
pub fn OGR_ST_GetParamStr(
hST: OGRStyleToolH,
eParam: libc::c_int,
bValueIsNull: *mut libc::c_int,
) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_ST_GetParamNum(
hST: OGRStyleToolH,
eParam: libc::c_int,
bValueIsNull: *mut libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_ST_GetParamDbl(
hST: OGRStyleToolH,
eParam: libc::c_int,
bValueIsNull: *mut libc::c_int,
) -> f64;
}
extern "C" {
pub fn OGR_ST_SetParamStr(
hST: OGRStyleToolH,
eParam: libc::c_int,
pszValue: *const libc::c_char,
);
}
extern "C" {
pub fn OGR_ST_SetParamNum(hST: OGRStyleToolH, eParam: libc::c_int, nValue: libc::c_int);
}
extern "C" {
pub fn OGR_ST_SetParamDbl(hST: OGRStyleToolH, eParam: libc::c_int, dfValue: f64);
}
extern "C" {
pub fn OGR_ST_GetStyleString(hST: OGRStyleToolH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_ST_GetRGBFromString(
hST: OGRStyleToolH,
pszColor: *const libc::c_char,
pnRed: *mut libc::c_int,
pnGreen: *mut libc::c_int,
pnBlue: *mut libc::c_int,
pnAlpha: *mut libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_STBL_Create() -> OGRStyleTableH;
}
extern "C" {
pub fn OGR_STBL_Destroy(hSTBL: OGRStyleTableH);
}
extern "C" {
pub fn OGR_STBL_AddStyle(
hStyleTable: OGRStyleTableH,
pszName: *const libc::c_char,
pszStyleString: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_STBL_SaveStyleTable(
hStyleTable: OGRStyleTableH,
pszFilename: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_STBL_LoadStyleTable(
hStyleTable: OGRStyleTableH,
pszFilename: *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn OGR_STBL_Find(
hStyleTable: OGRStyleTableH,
pszName: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_STBL_ResetStyleStringReading(hStyleTable: OGRStyleTableH);
}
extern "C" {
pub fn OGR_STBL_GetNextStyle(hStyleTable: OGRStyleTableH) -> *const libc::c_char;
}
extern "C" {
pub fn OGR_STBL_GetLastStyleName(hStyleTable: OGRStyleTableH) -> *const libc::c_char;
}
/// Raster pixel data types.
///
/// Mirrors the C `GDALDataType` enum. bindgen's "moduleized enum" style maps
/// each C enumerator to a `pub const` inside a module, with `Type` as the
/// enum's underlying integer representation; the numeric values are fixed by
/// the C ABI and must not be changed.
pub mod GDALDataType {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GDT_Unknown: Type = 0;
    pub const GDT_Byte: Type = 1;
    pub const GDT_UInt16: Type = 2;
    pub const GDT_Int16: Type = 3;
    pub const GDT_UInt32: Type = 4;
    pub const GDT_Int32: Type = 5;
    pub const GDT_Float32: Type = 6;
    pub const GDT_Float64: Type = 7;
    // Complex-valued types (paired real/imaginary components).
    pub const GDT_CInt16: Type = 8;
    pub const GDT_CInt32: Type = 9;
    pub const GDT_CFloat32: Type = 10;
    pub const GDT_CFloat64: Type = 11;
    // Sentinel: number of defined data types, not a usable type itself.
    pub const GDT_TypeCount: Type = 12;
}
extern "C" {
pub fn GDALGetDataTypeSize(arg1: GDALDataType::Type) -> libc::c_int;
}
extern "C" {
pub fn GDALGetDataTypeSizeBits(eDataType: GDALDataType::Type) -> libc::c_int;
}
extern "C" {
pub fn GDALGetDataTypeSizeBytes(arg1: GDALDataType::Type) -> libc::c_int;
}
extern "C" {
pub fn GDALDataTypeIsComplex(arg1: GDALDataType::Type) -> libc::c_int;
}
extern "C" {
pub fn GDALDataTypeIsInteger(arg1: GDALDataType::Type) -> libc::c_int;
}
extern "C" {
pub fn GDALDataTypeIsFloating(arg1: GDALDataType::Type) -> libc::c_int;
}
extern "C" {
pub fn GDALDataTypeIsSigned(arg1: GDALDataType::Type) -> libc::c_int;
}
extern "C" {
pub fn GDALGetDataTypeName(arg1: GDALDataType::Type) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetDataTypeByName(arg1: *const libc::c_char) -> GDALDataType::Type;
}
extern "C" {
pub fn GDALDataTypeUnion(
arg1: GDALDataType::Type,
arg2: GDALDataType::Type,
) -> GDALDataType::Type;
}
extern "C" {
pub fn GDALDataTypeUnionWithValue(
eDT: GDALDataType::Type,
dValue: f64,
bComplex: libc::c_int,
) -> GDALDataType::Type;
}
extern "C" {
pub fn GDALFindDataType(
nBits: libc::c_int,
bSigned: libc::c_int,
bFloating: libc::c_int,
bComplex: libc::c_int,
) -> GDALDataType::Type;
}
extern "C" {
pub fn GDALFindDataTypeForValue(dValue: f64, bComplex: libc::c_int) -> GDALDataType::Type;
}
extern "C" {
pub fn GDALAdjustValueToDataType(
eDT: GDALDataType::Type,
dfValue: f64,
pbClamped: *mut libc::c_int,
pbRounded: *mut libc::c_int,
) -> f64;
}
extern "C" {
pub fn GDALGetNonComplexDataType(arg1: GDALDataType::Type) -> GDALDataType::Type;
}
extern "C" {
pub fn GDALDataTypeIsConversionLossy(
eTypeFrom: GDALDataType::Type,
eTypeTo: GDALDataType::Type,
) -> libc::c_int;
}
/// Status values reported by GDAL's asynchronous raster I/O readers.
///
/// Mirrors the C `GDALAsyncStatusType` enum (moduleized-enum bindgen style);
/// values are fixed by the C ABI.
pub mod GDALAsyncStatusType {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GARIO_PENDING: Type = 0;
    pub const GARIO_UPDATE: Type = 1;
    pub const GARIO_ERROR: Type = 2;
    pub const GARIO_COMPLETE: Type = 3;
    // Sentinel: number of defined status values, not a real status.
    pub const GARIO_TypeCount: Type = 4;
}
extern "C" {
pub fn GDALGetAsyncStatusTypeName(arg1: GDALAsyncStatusType::Type) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetAsyncStatusTypeByName(arg1: *const libc::c_char) -> GDALAsyncStatusType::Type;
}
/// Dataset access mode flags passed to functions such as `GDALOpen`.
///
/// Mirrors the C `GDALAccess` enum; values are fixed by the C ABI.
pub mod GDALAccess {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GA_ReadOnly: Type = 0;
    pub const GA_Update: Type = 1;
}
/// Direction flag for raster I/O operations.
///
/// Mirrors the C `GDALRWFlag` enum; values are fixed by the C ABI.
pub mod GDALRWFlag {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GF_Read: Type = 0;
    pub const GF_Write: Type = 1;
}
/// Resampling algorithms for raster I/O requests.
///
/// Mirrors the C `GDALRIOResampleAlg` enum; values are fixed by the C ABI.
/// Note the deliberate gap between `GRIORA_RESERVED_START` (8) and
/// `GRIORA_RESERVED_END` (13): those values are reserved by the C header for
/// future algorithms and carried over here verbatim.
pub mod GDALRIOResampleAlg {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GRIORA_NearestNeighbour: Type = 0;
    pub const GRIORA_Bilinear: Type = 1;
    pub const GRIORA_Cubic: Type = 2;
    pub const GRIORA_CubicSpline: Type = 3;
    pub const GRIORA_Lanczos: Type = 4;
    pub const GRIORA_Average: Type = 5;
    pub const GRIORA_Mode: Type = 6;
    pub const GRIORA_Gauss: Type = 7;
    // Reserved range: not valid algorithm selectors.
    pub const GRIORA_RESERVED_START: Type = 8;
    pub const GRIORA_RESERVED_END: Type = 13;
    pub const GRIORA_RMS: Type = 14;
    // Alias for the highest currently-defined algorithm value.
    pub const GRIORA_LAST: Type = 14;
}
/// Extra arguments for extended raster I/O calls.
///
/// Mirrors the C `GDALRasterIOExtraArg` struct. `#[repr(C)]` guarantees the
/// field order, size, and alignment match the C definition — the layout test
/// below pins them. Field order must never be changed.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALRasterIOExtraArg {
    /// Structure version number (set by the C-side init macro).
    pub nVersion: libc::c_int,
    /// Resampling algorithm to apply to the request.
    pub eResampleAlg: GDALRIOResampleAlg::Type,
    /// Optional progress callback; presumably may be null — TODO confirm
    /// against the GDAL C docs.
    pub pfnProgress: GDALProgressFunc,
    /// Opaque user data forwarded to `pfnProgress`.
    pub pProgressData: *mut libc::c_void,
    /// C boolean: when nonzero, the floating-point window fields below are valid.
    pub bFloatingPointWindowValidity: libc::c_int,
    pub dfXOff: f64,
    pub dfYOff: f64,
    pub dfXSize: f64,
    pub dfYSize: f64,
}
#[test]
fn bindgen_test_layout_GDALRasterIOExtraArg() {
    // Layout assertions: the Rust struct must match the C struct's size,
    // alignment, and per-field offsets exactly, or every FFI call passing it
    // would corrupt memory.
    //
    // Offsets are computed from an uninitialized value through
    // `ptr::addr_of!`, which produces a raw pointer without ever creating a
    // reference. The previous pattern — `&(*ptr::null::<T>()).field` —
    // dereferences a null pointer to take a field reference, which is
    // undefined behavior; this is the UB-free form emitted by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<GDALRasterIOExtraArg> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<GDALRasterIOExtraArg>(),
        64usize,
        concat!("Size of: ", stringify!(GDALRasterIOExtraArg))
    );
    assert_eq!(
        ::std::mem::align_of::<GDALRasterIOExtraArg>(),
        8usize,
        concat!("Alignment of ", stringify!(GDALRasterIOExtraArg))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nVersion) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(nVersion)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).eResampleAlg) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(eResampleAlg)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pfnProgress) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(pfnProgress)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pProgressData) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(pProgressData)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).bFloatingPointWindowValidity) as usize - ptr as usize
        },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(bFloatingPointWindowValidity)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfXOff) as usize - ptr as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(dfXOff)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfYOff) as usize - ptr as usize },
        40usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(dfYOff)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfXSize) as usize - ptr as usize },
        48usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(dfXSize)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfYSize) as usize - ptr as usize },
        56usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALRasterIOExtraArg),
            "::",
            stringify!(dfYSize)
        )
    );
}
/// Color interpretations that a raster band can carry.
///
/// Mirrors the C `GDALColorInterp` enum; values are fixed by the C ABI.
pub mod GDALColorInterp {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GCI_Undefined: Type = 0;
    pub const GCI_GrayIndex: Type = 1;
    pub const GCI_PaletteIndex: Type = 2;
    // RGBA component bands.
    pub const GCI_RedBand: Type = 3;
    pub const GCI_GreenBand: Type = 4;
    pub const GCI_BlueBand: Type = 5;
    pub const GCI_AlphaBand: Type = 6;
    // HLS component bands.
    pub const GCI_HueBand: Type = 7;
    pub const GCI_SaturationBand: Type = 8;
    pub const GCI_LightnessBand: Type = 9;
    // CMYK component bands.
    pub const GCI_CyanBand: Type = 10;
    pub const GCI_MagentaBand: Type = 11;
    pub const GCI_YellowBand: Type = 12;
    pub const GCI_BlackBand: Type = 13;
    // YCbCr component bands.
    pub const GCI_YCbCr_YBand: Type = 14;
    pub const GCI_YCbCr_CbBand: Type = 15;
    pub const GCI_YCbCr_CrBand: Type = 16;
    // Alias for the highest currently-defined interpretation value.
    pub const GCI_Max: Type = 16;
}
extern "C" {
pub fn GDALGetColorInterpretationName(arg1: GDALColorInterp::Type) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetColorInterpretationByName(pszName: *const libc::c_char) -> GDALColorInterp::Type;
}
/// Interpretations of the entries in a color table (palette).
///
/// Mirrors the C `GDALPaletteInterp` enum; values are fixed by the C ABI.
pub mod GDALPaletteInterp {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GPI_Gray: Type = 0;
    pub const GPI_RGB: Type = 1;
    pub const GPI_CMYK: Type = 2;
    pub const GPI_HLS: Type = 3;
}
extern "C" {
pub fn GDALGetPaletteInterpretationName(arg1: GDALPaletteInterp::Type) -> *const libc::c_char;
}
pub type GDALMajorObjectH = *mut libc::c_void;
pub type GDALDatasetH = *mut libc::c_void;
pub type GDALRasterBandH = *mut libc::c_void;
pub type GDALDriverH = *mut libc::c_void;
pub type GDALColorTableH = *mut libc::c_void;
pub type GDALRasterAttributeTableH = *mut libc::c_void;
pub type GDALAsyncReaderH = *mut libc::c_void;
pub type GSpacing = GIntBig;
/// Classes of extended data types used by the multidimensional array API.
///
/// Mirrors the C `GDALExtendedDataTypeClass` enum; values are fixed by the
/// C ABI.
pub mod GDALExtendedDataTypeClass {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GEDTC_NUMERIC: Type = 0;
    pub const GEDTC_STRING: Type = 1;
    pub const GEDTC_COMPOUND: Type = 2;
}
/// Sub-types refining an extended data type (multidimensional array API).
///
/// Mirrors the C `GDALExtendedDataTypeSubType` enum; values are fixed by the
/// C ABI.
pub mod GDALExtendedDataTypeSubType {
    /// Underlying integer type of the C enum.
    pub type Type = libc::c_uint;
    pub const GEDTST_NONE: Type = 0;
    pub const GEDTST_JSON: Type = 1;
}
// Opaque handle types for GDAL's multidimensional (MDArray) C API.
//
// Each `*HS` struct is a bindgen-generated opaque type: the zero-length
// `_unused` field makes it impossible to construct or inspect from Rust, so
// the only legal use is through the `*mut` handle aliases below, which are
// created and destroyed exclusively by the C library.

/// Opaque C-side storage for an extended data type.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALExtendedDataTypeHS {
    _unused: [u8; 0],
}
/// Handle to an extended data type.
pub type GDALExtendedDataTypeH = *mut GDALExtendedDataTypeHS;
/// Opaque C-side storage for a compound-type component.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALEDTComponentHS {
    _unused: [u8; 0],
}
/// Handle to a compound-type component.
pub type GDALEDTComponentH = *mut GDALEDTComponentHS;
/// Opaque C-side storage for a group in a multidimensional dataset.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALGroupHS {
    _unused: [u8; 0],
}
/// Handle to a group.
pub type GDALGroupH = *mut GDALGroupHS;
/// Opaque C-side storage for a multidimensional array.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALMDArrayHS {
    _unused: [u8; 0],
}
/// Handle to a multidimensional array.
pub type GDALMDArrayH = *mut GDALMDArrayHS;
/// Opaque C-side storage for an attribute.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALAttributeHS {
    _unused: [u8; 0],
}
/// Handle to an attribute.
pub type GDALAttributeH = *mut GDALAttributeHS;
/// Opaque C-side storage for an array dimension.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALDimensionHS {
    _unused: [u8; 0],
}
/// Handle to an array dimension.
pub type GDALDimensionH = *mut GDALDimensionHS;
extern "C" {
pub fn GDALAllRegister();
}
extern "C" {
pub fn GDALCreate(
hDriver: GDALDriverH,
arg1: *const libc::c_char,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
arg5: GDALDataType::Type,
arg6: CSLConstList,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALCreateCopy(
arg1: GDALDriverH,
arg2: *const libc::c_char,
arg3: GDALDatasetH,
arg4: libc::c_int,
arg5: CSLConstList,
arg6: GDALProgressFunc,
arg7: *mut libc::c_void,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALIdentifyDriver(
pszFilename: *const libc::c_char,
papszFileList: CSLConstList,
) -> GDALDriverH;
}
extern "C" {
pub fn GDALIdentifyDriverEx(
pszFilename: *const libc::c_char,
nIdentifyFlags: libc::c_uint,
papszAllowedDrivers: *const *const libc::c_char,
papszFileList: *const *const libc::c_char,
) -> GDALDriverH;
}
extern "C" {
pub fn GDALOpen(pszFilename: *const libc::c_char, eAccess: GDALAccess::Type) -> GDALDatasetH;
}
extern "C" {
pub fn GDALOpenShared(arg1: *const libc::c_char, arg2: GDALAccess::Type) -> GDALDatasetH;
}
extern "C" {
pub fn GDALOpenEx(
pszFilename: *const libc::c_char,
nOpenFlags: libc::c_uint,
papszAllowedDrivers: *const *const libc::c_char,
papszOpenOptions: *const *const libc::c_char,
papszSiblingFiles: *const *const libc::c_char,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALDumpOpenDatasets(arg1: *mut FILE) -> libc::c_int;
}
extern "C" {
pub fn GDALGetDriverByName(arg1: *const libc::c_char) -> GDALDriverH;
}
extern "C" {
pub fn GDALGetDriverCount() -> libc::c_int;
}
extern "C" {
pub fn GDALGetDriver(arg1: libc::c_int) -> GDALDriverH;
}
extern "C" {
pub fn GDALCreateDriver() -> GDALDriverH;
}
extern "C" {
pub fn GDALDestroyDriver(arg1: GDALDriverH);
}
extern "C" {
pub fn GDALRegisterDriver(arg1: GDALDriverH) -> libc::c_int;
}
extern "C" {
pub fn GDALDeregisterDriver(arg1: GDALDriverH);
}
extern "C" {
pub fn GDALDestroyDriverManager();
}
extern "C" {
pub fn GDALDestroy();
}
extern "C" {
pub fn GDALDeleteDataset(arg1: GDALDriverH, arg2: *const libc::c_char) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRenameDataset(
arg1: GDALDriverH,
pszNewName: *const libc::c_char,
pszOldName: *const libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALCopyDatasetFiles(
arg1: GDALDriverH,
pszNewName: *const libc::c_char,
pszOldName: *const libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALValidateCreationOptions(
arg1: GDALDriverH,
papszCreationOptions: CSLConstList,
) -> libc::c_int;
}
extern "C" {
pub fn GDALGetDriverShortName(arg1: GDALDriverH) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetDriverLongName(arg1: GDALDriverH) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetDriverHelpTopic(arg1: GDALDriverH) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetDriverCreationOptionList(arg1: GDALDriverH) -> *const libc::c_char;
}
/// A ground control point (GCP) tying a raster pixel/line location to a
/// georeferenced coordinate.
///
/// Mirrors the C `GDAL_GCP` struct. `#[repr(C)]` guarantees the field order,
/// size, and alignment match the C definition — the layout test below pins
/// them. Field order must never be changed. The string pointers are owned by
/// the C library per the GCP init/deinit functions declared below.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDAL_GCP {
    /// Identifier string for the GCP.
    pub pszId: *mut libc::c_char,
    /// Informational text associated with the GCP.
    pub pszInfo: *mut libc::c_char,
    /// Pixel (x) location of the GCP in the raster.
    pub dfGCPPixel: f64,
    /// Line (y) location of the GCP in the raster.
    pub dfGCPLine: f64,
    /// Georeferenced X coordinate of the GCP.
    pub dfGCPX: f64,
    /// Georeferenced Y coordinate of the GCP.
    pub dfGCPY: f64,
    /// Georeferenced Z (elevation) coordinate of the GCP.
    pub dfGCPZ: f64,
}
#[test]
fn bindgen_test_layout_GDAL_GCP() {
    // Layout assertions: the Rust struct must match the C struct's size,
    // alignment, and per-field offsets exactly, or every FFI call passing it
    // would corrupt memory.
    //
    // Offsets are computed from an uninitialized value through
    // `ptr::addr_of!`, which produces a raw pointer without ever creating a
    // reference. The previous pattern — `&(*ptr::null::<T>()).field` —
    // dereferences a null pointer to take a field reference, which is
    // undefined behavior; this is the UB-free form emitted by modern bindgen.
    const UNINIT: ::std::mem::MaybeUninit<GDAL_GCP> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<GDAL_GCP>(),
        56usize,
        concat!("Size of: ", stringify!(GDAL_GCP))
    );
    assert_eq!(
        ::std::mem::align_of::<GDAL_GCP>(),
        8usize,
        concat!("Alignment of ", stringify!(GDAL_GCP))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pszId) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(GDAL_GCP),
            "::",
            stringify!(pszId)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pszInfo) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(GDAL_GCP),
            "::",
            stringify!(pszInfo)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfGCPPixel) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(GDAL_GCP),
            "::",
            stringify!(dfGCPPixel)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfGCPLine) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(GDAL_GCP),
            "::",
            stringify!(dfGCPLine)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfGCPX) as usize - ptr as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(GDAL_GCP),
            "::",
            stringify!(dfGCPX)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfGCPY) as usize - ptr as usize },
        40usize,
        concat!(
            "Offset of field: ",
            stringify!(GDAL_GCP),
            "::",
            stringify!(dfGCPY)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfGCPZ) as usize - ptr as usize },
        48usize,
        concat!(
            "Offset of field: ",
            stringify!(GDAL_GCP),
            "::",
            stringify!(dfGCPZ)
        )
    );
}
extern "C" {
pub fn GDALInitGCPs(arg1: libc::c_int, arg2: *mut GDAL_GCP);
}
extern "C" {
pub fn GDALDeinitGCPs(arg1: libc::c_int, arg2: *mut GDAL_GCP);
}
extern "C" {
pub fn GDALDuplicateGCPs(arg1: libc::c_int, arg2: *const GDAL_GCP) -> *mut GDAL_GCP;
}
extern "C" {
pub fn GDALGCPsToGeoTransform(
nGCPCount: libc::c_int,
pasGCPs: *const GDAL_GCP,
padfGeoTransform: *mut f64,
bApproxOK: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn GDALInvGeoTransform(
padfGeoTransformIn: *mut f64,
padfInvGeoTransformOut: *mut f64,
) -> libc::c_int;
}
extern "C" {
pub fn GDALApplyGeoTransform(
arg1: *mut f64,
arg2: f64,
arg3: f64,
arg4: *mut f64,
arg5: *mut f64,
);
}
extern "C" {
pub fn GDALComposeGeoTransforms(
padfGeoTransform1: *const f64,
padfGeoTransform2: *const f64,
padfGeoTransformOut: *mut f64,
);
}
extern "C" {
pub fn GDALGetMetadataDomainList(hObject: GDALMajorObjectH) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn GDALGetMetadata(
arg1: GDALMajorObjectH,
arg2: *const libc::c_char,
) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn GDALSetMetadata(
arg1: GDALMajorObjectH,
arg2: CSLConstList,
arg3: *const libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetMetadataItem(
arg1: GDALMajorObjectH,
arg2: *const libc::c_char,
arg3: *const libc::c_char,
) -> *const libc::c_char;
}
extern "C" {
pub fn GDALSetMetadataItem(
arg1: GDALMajorObjectH,
arg2: *const libc::c_char,
arg3: *const libc::c_char,
arg4: *const libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetDescription(arg1: GDALMajorObjectH) -> *const libc::c_char;
}
extern "C" {
pub fn GDALSetDescription(arg1: GDALMajorObjectH, arg2: *const libc::c_char);
}
extern "C" {
pub fn GDALGetDatasetDriver(arg1: GDALDatasetH) -> GDALDriverH;
}
extern "C" {
pub fn GDALGetFileList(arg1: GDALDatasetH) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn GDALClose(arg1: GDALDatasetH);
}
extern "C" {
pub fn GDALGetRasterXSize(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetRasterYSize(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetRasterCount(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetRasterBand(arg1: GDALDatasetH, arg2: libc::c_int) -> GDALRasterBandH;
}
extern "C" {
pub fn GDALAddBand(
hDS: GDALDatasetH,
eType: GDALDataType::Type,
papszOptions: CSLConstList,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALBeginAsyncReader(
hDS: GDALDatasetH,
nXOff: libc::c_int,
nYOff: libc::c_int,
nXSize: libc::c_int,
nYSize: libc::c_int,
pBuf: *mut libc::c_void,
nBufXSize: libc::c_int,
nBufYSize: libc::c_int,
eBufType: GDALDataType::Type,
nBandCount: libc::c_int,
panBandMap: *mut libc::c_int,
nPixelSpace: libc::c_int,
nLineSpace: libc::c_int,
nBandSpace: libc::c_int,
papszOptions: CSLConstList,
) -> GDALAsyncReaderH;
}
extern "C" {
pub fn GDALEndAsyncReader(hDS: GDALDatasetH, hAsynchReaderH: GDALAsyncReaderH);
}
extern "C" {
pub fn GDALDatasetRasterIO(
hDS: GDALDatasetH,
eRWFlag: GDALRWFlag::Type,
nDSXOff: libc::c_int,
nDSYOff: libc::c_int,
nDSXSize: libc::c_int,
nDSYSize: libc::c_int,
pBuffer: *mut libc::c_void,
nBXSize: libc::c_int,
nBYSize: libc::c_int,
eBDataType: GDALDataType::Type,
nBandCount: libc::c_int,
panBandCount: *mut libc::c_int,
nPixelSpace: libc::c_int,
nLineSpace: libc::c_int,
nBandSpace: libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALDatasetRasterIOEx(
hDS: GDALDatasetH,
eRWFlag: GDALRWFlag::Type,
nDSXOff: libc::c_int,
nDSYOff: libc::c_int,
nDSXSize: libc::c_int,
nDSYSize: libc::c_int,
pBuffer: *mut libc::c_void,
nBXSize: libc::c_int,
nBYSize: libc::c_int,
eBDataType: GDALDataType::Type,
nBandCount: libc::c_int,
panBandCount: *mut libc::c_int,
nPixelSpace: GSpacing,
nLineSpace: GSpacing,
nBandSpace: GSpacing,
psExtraArg: *mut GDALRasterIOExtraArg,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALDatasetAdviseRead(
hDS: GDALDatasetH,
nDSXOff: libc::c_int,
nDSYOff: libc::c_int,
nDSXSize: libc::c_int,
nDSYSize: libc::c_int,
nBXSize: libc::c_int,
nBYSize: libc::c_int,
eBDataType: GDALDataType::Type,
nBandCount: libc::c_int,
panBandCount: *mut libc::c_int,
papszOptions: CSLConstList,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetProjectionRef(arg1: GDALDatasetH) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetSpatialRef(arg1: GDALDatasetH) -> OGRSpatialReferenceH;
}
extern "C" {
pub fn GDALSetProjection(arg1: GDALDatasetH, arg2: *const libc::c_char) -> CPLErr::Type;
}
extern "C" {
pub fn GDALSetSpatialRef(arg1: GDALDatasetH, arg2: OGRSpatialReferenceH) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetGeoTransform(arg1: GDALDatasetH, arg2: *mut f64) -> CPLErr::Type;
}
extern "C" {
pub fn GDALSetGeoTransform(arg1: GDALDatasetH, arg2: *mut f64) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetGCPCount(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetGCPProjection(arg1: GDALDatasetH) -> *const libc::c_char;
}
extern "C" {
pub fn GDALGetGCPSpatialRef(arg1: GDALDatasetH) -> OGRSpatialReferenceH;
}
extern "C" {
pub fn GDALGetGCPs(arg1: GDALDatasetH) -> *const GDAL_GCP;
}
extern "C" {
pub fn GDALSetGCPs(
arg1: GDALDatasetH,
arg2: libc::c_int,
arg3: *const GDAL_GCP,
arg4: *const libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALSetGCPs2(
arg1: GDALDatasetH,
arg2: libc::c_int,
arg3: *const GDAL_GCP,
arg4: OGRSpatialReferenceH,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetInternalHandle(
arg1: GDALDatasetH,
arg2: *const libc::c_char,
) -> *mut libc::c_void;
}
extern "C" {
pub fn GDALReferenceDataset(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALDereferenceDataset(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALReleaseDataset(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALBuildOverviews(
arg1: GDALDatasetH,
arg2: *const libc::c_char,
arg3: libc::c_int,
arg4: *mut libc::c_int,
arg5: libc::c_int,
arg6: *mut libc::c_int,
arg7: GDALProgressFunc,
arg8: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetOpenDatasets(hDS: *mut *mut GDALDatasetH, pnCount: *mut libc::c_int);
}
extern "C" {
pub fn GDALGetAccess(hDS: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALFlushCache(hDS: GDALDatasetH);
}
extern "C" {
pub fn GDALCreateDatasetMaskBand(hDS: GDALDatasetH, nFlags: libc::c_int) -> CPLErr::Type;
}
extern "C" {
pub fn GDALDatasetCopyWholeRaster(
hSrcDS: GDALDatasetH,
hDstDS: GDALDatasetH,
papszOptions: CSLConstList,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRasterBandCopyWholeRaster(
hSrcBand: GDALRasterBandH,
hDstBand: GDALRasterBandH,
constpapszOptions: *const *const libc::c_char,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRegenerateOverviews(
hSrcBand: GDALRasterBandH,
nOverviewCount: libc::c_int,
pahOverviewBands: *mut GDALRasterBandH,
pszResampling: *const libc::c_char,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALDatasetGetLayerCount(arg1: GDALDatasetH) -> libc::c_int;
}
extern "C" {
pub fn GDALDatasetGetLayer(arg1: GDALDatasetH, arg2: libc::c_int) -> OGRLayerH;
}
extern "C" {
pub fn GDALDatasetGetLayerByName(arg1: GDALDatasetH, arg2: *const libc::c_char) -> OGRLayerH;
}
extern "C" {
pub fn GDALDatasetIsLayerPrivate(arg1: GDALDatasetH, arg2: libc::c_int) -> libc::c_int;
}
extern "C" {
pub fn GDALDatasetDeleteLayer(arg1: GDALDatasetH, arg2: libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn GDALDatasetCreateLayer(
arg1: GDALDatasetH,
arg2: *const libc::c_char,
arg3: OGRSpatialReferenceH,
arg4: OGRwkbGeometryType::Type,
arg5: CSLConstList,
) -> OGRLayerH;
}
extern "C" {
pub fn GDALDatasetCopyLayer(
arg1: GDALDatasetH,
arg2: OGRLayerH,
arg3: *const libc::c_char,
arg4: CSLConstList,
) -> OGRLayerH;
}
extern "C" {
pub fn GDALDatasetResetReading(arg1: GDALDatasetH);
}
extern "C" {
pub fn GDALDatasetGetNextFeature(
hDS: GDALDatasetH,
phBelongingLayer: *mut OGRLayerH,
pdfProgressPct: *mut f64,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> OGRFeatureH;
}
extern "C" {
pub fn GDALDatasetTestCapability(arg1: GDALDatasetH, arg2: *const libc::c_char) -> libc::c_int;
}
extern "C" {
pub fn GDALDatasetExecuteSQL(
arg1: GDALDatasetH,
arg2: *const libc::c_char,
arg3: OGRGeometryH,
arg4: *const libc::c_char,
) -> OGRLayerH;
}
extern "C" {
pub fn GDALDatasetAbortSQL(arg1: GDALDatasetH) -> OGRErr::Type;
}
extern "C" {
pub fn GDALDatasetReleaseResultSet(arg1: GDALDatasetH, arg2: OGRLayerH);
}
extern "C" {
pub fn GDALDatasetGetStyleTable(arg1: GDALDatasetH) -> OGRStyleTableH;
}
extern "C" {
pub fn GDALDatasetSetStyleTableDirectly(arg1: GDALDatasetH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn GDALDatasetSetStyleTable(arg1: GDALDatasetH, arg2: OGRStyleTableH);
}
extern "C" {
pub fn GDALDatasetStartTransaction(hDS: GDALDatasetH, bForce: libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn GDALDatasetCommitTransaction(hDS: GDALDatasetH) -> OGRErr::Type;
}
extern "C" {
pub fn GDALDatasetRollbackTransaction(hDS: GDALDatasetH) -> OGRErr::Type;
}
extern "C" {
pub fn GDALDatasetClearStatistics(hDS: GDALDatasetH);
}
extern "C" {
pub fn GDALDatasetGetFieldDomain(
hDS: GDALDatasetH,
pszName: *const libc::c_char,
) -> OGRFieldDomainH;
}
extern "C" {
pub fn GDALDatasetAddFieldDomain(
hDS: GDALDatasetH,
hFieldDomain: OGRFieldDomainH,
ppszFailureReason: *mut *mut libc::c_char,
) -> bool;
}
/// C callback signature for a derived ("pixel function") raster band:
/// combines `nSources` input buffers into `pData`. `Option` models the
/// nullable C function pointer.
pub type GDALDerivedPixelFunc = ::std::option::Option<
    unsafe extern "C" fn(
        papoSources: *mut *mut libc::c_void,
        nSources: libc::c_int,
        pData: *mut libc::c_void,
        nBufXSize: libc::c_int,
        nBufYSize: libc::c_int,
        eSrcType: GDALDataType::Type,
        eBufType: GDALDataType::Type,
        nPixelSpace: libc::c_int,
        nLineSpace: libc::c_int,
    ) -> CPLErr::Type,
>;
/// Like [`GDALDerivedPixelFunc`] but additionally receives user-supplied
/// key/value arguments via `papszFunctionArgs`.
pub type GDALDerivedPixelFuncWithArgs = ::std::option::Option<
    unsafe extern "C" fn(
        papoSources: *mut *mut libc::c_void,
        nSources: libc::c_int,
        pData: *mut libc::c_void,
        nBufXSize: libc::c_int,
        nBufYSize: libc::c_int,
        eSrcType: GDALDataType::Type,
        eBufType: GDALDataType::Type,
        nPixelSpace: libc::c_int,
        nLineSpace: libc::c_int,
        papszFunctionArgs: CSLConstList,
    ) -> CPLErr::Type,
>;
extern "C" {
pub fn GDALGetRasterDataType(arg1: GDALRasterBandH) -> GDALDataType::Type;
}
extern "C" {
pub fn GDALGetBlockSize(
arg1: GDALRasterBandH,
pnXSize: *mut libc::c_int,
pnYSize: *mut libc::c_int,
);
}
extern "C" {
pub fn GDALGetActualBlockSize(
arg1: GDALRasterBandH,
nXBlockOff: libc::c_int,
nYBlockOff: libc::c_int,
pnXValid: *mut libc::c_int,
pnYValid: *mut libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRasterAdviseRead(
hRB: GDALRasterBandH,
nDSXOff: libc::c_int,
nDSYOff: libc::c_int,
nDSXSize: libc::c_int,
nDSYSize: libc::c_int,
nBXSize: libc::c_int,
nBYSize: libc::c_int,
eBDataType: GDALDataType::Type,
papszOptions: CSLConstList,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRasterIO(
hRBand: GDALRasterBandH,
eRWFlag: GDALRWFlag::Type,
nDSXOff: libc::c_int,
nDSYOff: libc::c_int,
nDSXSize: libc::c_int,
nDSYSize: libc::c_int,
pBuffer: *mut libc::c_void,
nBXSize: libc::c_int,
nBYSize: libc::c_int,
eBDataType: GDALDataType::Type,
nPixelSpace: libc::c_int,
nLineSpace: libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRasterIOEx(
hRBand: GDALRasterBandH,
eRWFlag: GDALRWFlag::Type,
nDSXOff: libc::c_int,
nDSYOff: libc::c_int,
nDSXSize: libc::c_int,
nDSYSize: libc::c_int,
pBuffer: *mut libc::c_void,
nBXSize: libc::c_int,
nBYSize: libc::c_int,
eBDataType: GDALDataType::Type,
nPixelSpace: GSpacing,
nLineSpace: GSpacing,
psExtraArg: *mut GDALRasterIOExtraArg,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALReadBlock(
arg1: GDALRasterBandH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALWriteBlock(
arg1: GDALRasterBandH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterBandXSize(arg1: GDALRasterBandH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetRasterBandYSize(arg1: GDALRasterBandH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetRasterAccess(arg1: GDALRasterBandH) -> GDALAccess::Type;
}
extern "C" {
pub fn GDALGetBandNumber(arg1: GDALRasterBandH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetBandDataset(arg1: GDALRasterBandH) -> GDALDatasetH;
}
extern "C" {
pub fn GDALGetRasterColorInterpretation(arg1: GDALRasterBandH) -> GDALColorInterp::Type;
}
extern "C" {
pub fn GDALSetRasterColorInterpretation(
arg1: GDALRasterBandH,
arg2: GDALColorInterp::Type,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterColorTable(arg1: GDALRasterBandH) -> GDALColorTableH;
}
extern "C" {
pub fn GDALSetRasterColorTable(arg1: GDALRasterBandH, arg2: GDALColorTableH) -> CPLErr::Type;
}
extern "C" {
pub fn GDALHasArbitraryOverviews(arg1: GDALRasterBandH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetOverviewCount(arg1: GDALRasterBandH) -> libc::c_int;
}
extern "C" {
pub fn GDALGetOverview(arg1: GDALRasterBandH, arg2: libc::c_int) -> GDALRasterBandH;
}
extern "C" {
pub fn GDALGetRasterNoDataValue(arg1: GDALRasterBandH, arg2: *mut libc::c_int) -> f64;
}
extern "C" {
pub fn GDALSetRasterNoDataValue(arg1: GDALRasterBandH, arg2: f64) -> CPLErr::Type;
}
extern "C" {
pub fn GDALDeleteRasterNoDataValue(arg1: GDALRasterBandH) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterCategoryNames(arg1: GDALRasterBandH) -> *mut *mut libc::c_char;
}
extern "C" {
pub fn GDALSetRasterCategoryNames(arg1: GDALRasterBandH, arg2: CSLConstList) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterMinimum(arg1: GDALRasterBandH, pbSuccess: *mut libc::c_int) -> f64;
}
extern "C" {
pub fn GDALGetRasterMaximum(arg1: GDALRasterBandH, pbSuccess: *mut libc::c_int) -> f64;
}
extern "C" {
pub fn GDALGetRasterStatistics(
arg1: GDALRasterBandH,
bApproxOK: libc::c_int,
bForce: libc::c_int,
pdfMin: *mut f64,
pdfMax: *mut f64,
pdfMean: *mut f64,
pdfStdDev: *mut f64,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALComputeRasterStatistics(
arg1: GDALRasterBandH,
bApproxOK: libc::c_int,
pdfMin: *mut f64,
pdfMax: *mut f64,
pdfMean: *mut f64,
pdfStdDev: *mut f64,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALSetRasterStatistics(
hBand: GDALRasterBandH,
dfMin: f64,
dfMax: f64,
dfMean: f64,
dfStdDev: f64,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRasterBandAsMDArray(arg1: GDALRasterBandH) -> GDALMDArrayH;
}
extern "C" {
pub fn GDALGetRasterUnitType(arg1: GDALRasterBandH) -> *const libc::c_char;
}
extern "C" {
pub fn GDALSetRasterUnitType(
hBand: GDALRasterBandH,
pszNewValue: *const libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterOffset(arg1: GDALRasterBandH, pbSuccess: *mut libc::c_int) -> f64;
}
extern "C" {
pub fn GDALSetRasterOffset(hBand: GDALRasterBandH, dfNewOffset: f64) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterScale(arg1: GDALRasterBandH, pbSuccess: *mut libc::c_int) -> f64;
}
extern "C" {
pub fn GDALSetRasterScale(hBand: GDALRasterBandH, dfNewOffset: f64) -> CPLErr::Type;
}
extern "C" {
pub fn GDALComputeRasterMinMax(
hBand: GDALRasterBandH,
bApproxOK: libc::c_int,
adfMinMax: *mut f64,
);
}
extern "C" {
pub fn GDALFlushRasterCache(hBand: GDALRasterBandH) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterHistogram(
hBand: GDALRasterBandH,
dfMin: f64,
dfMax: f64,
nBuckets: libc::c_int,
panHistogram: *mut libc::c_int,
bIncludeOutOfRange: libc::c_int,
bApproxOK: libc::c_int,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRasterHistogramEx(
hBand: GDALRasterBandH,
dfMin: f64,
dfMax: f64,
nBuckets: libc::c_int,
panHistogram: *mut GUIntBig,
bIncludeOutOfRange: libc::c_int,
bApproxOK: libc::c_int,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetDefaultHistogram(
hBand: GDALRasterBandH,
pdfMin: *mut f64,
pdfMax: *mut f64,
pnBuckets: *mut libc::c_int,
ppanHistogram: *mut *mut libc::c_int,
bForce: libc::c_int,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetDefaultHistogramEx(
hBand: GDALRasterBandH,
pdfMin: *mut f64,
pdfMax: *mut f64,
pnBuckets: *mut libc::c_int,
ppanHistogram: *mut *mut GUIntBig,
bForce: libc::c_int,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALSetDefaultHistogram(
hBand: GDALRasterBandH,
dfMin: f64,
dfMax: f64,
nBuckets: libc::c_int,
panHistogram: *mut libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALSetDefaultHistogramEx(
hBand: GDALRasterBandH,
dfMin: f64,
dfMax: f64,
nBuckets: libc::c_int,
panHistogram: *mut GUIntBig,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetRandomRasterSample(
arg1: GDALRasterBandH,
arg2: libc::c_int,
arg3: *mut f32,
) -> libc::c_int;
}
extern "C" {
pub fn GDALGetRasterSampleOverview(arg1: GDALRasterBandH, arg2: libc::c_int)
-> GDALRasterBandH;
}
extern "C" {
pub fn GDALGetRasterSampleOverviewEx(arg1: GDALRasterBandH, arg2: GUIntBig) -> GDALRasterBandH;
}
extern "C" {
pub fn GDALFillRaster(
hBand: GDALRasterBandH,
dfRealValue: f64,
dfImaginaryValue: f64,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALComputeBandStats(
hBand: GDALRasterBandH,
nSampleStep: libc::c_int,
pdfMean: *mut f64,
pdfStdDev: *mut f64,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALOverviewMagnitudeCorrection(
hBaseBand: GDALRasterBandH,
nOverviewCount: libc::c_int,
pahOverviews: *mut GDALRasterBandH,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetDefaultRAT(hBand: GDALRasterBandH) -> GDALRasterAttributeTableH;
}
extern "C" {
pub fn GDALSetDefaultRAT(
arg1: GDALRasterBandH,
arg2: GDALRasterAttributeTableH,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALAddDerivedBandPixelFunc(
pszName: *const libc::c_char,
pfnPixelFunc: GDALDerivedPixelFunc,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALAddDerivedBandPixelFuncWithArgs(
pszName: *const libc::c_char,
pfnPixelFunc: GDALDerivedPixelFuncWithArgs,
pszMetadata: *const libc::c_char,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetMaskBand(hBand: GDALRasterBandH) -> GDALRasterBandH;
}
extern "C" {
pub fn GDALGetMaskFlags(hBand: GDALRasterBandH) -> libc::c_int;
}
extern "C" {
pub fn GDALCreateMaskBand(hBand: GDALRasterBandH, nFlags: libc::c_int) -> CPLErr::Type;
}
extern "C" {
pub fn GDALGetDataCoverageStatus(
hBand: GDALRasterBandH,
nXOff: libc::c_int,
nYOff: libc::c_int,
nXSize: libc::c_int,
nYSize: libc::c_int,
nMaskFlagStop: libc::c_int,
pdfDataPct: *mut f64,
) -> libc::c_int;
}
extern "C" {
pub fn GDALARGetNextUpdatedRegion(
hARIO: GDALAsyncReaderH,
dfTimeout: f64,
pnXBufOff: *mut libc::c_int,
pnYBufOff: *mut libc::c_int,
pnXBufSize: *mut libc::c_int,
pnYBufSize: *mut libc::c_int,
) -> GDALAsyncStatusType::Type;
}
extern "C" {
pub fn GDALARLockBuffer(hARIO: GDALAsyncReaderH, dfTimeout: f64) -> libc::c_int;
}
extern "C" {
pub fn GDALARUnlockBuffer(hARIO: GDALAsyncReaderH);
}
extern "C" {
pub fn GDALGeneralCmdLineProcessor(
nArgc: libc::c_int,
ppapszArgv: *mut *mut *mut libc::c_char,
nOptions: libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn GDALSwapWords(
pData: *mut libc::c_void,
nWordSize: libc::c_int,
nWordCount: libc::c_int,
nWordSkip: libc::c_int,
);
}
extern "C" {
pub fn GDALSwapWordsEx(
pData: *mut libc::c_void,
nWordSize: libc::c_int,
nWordCount: usize,
nWordSkip: libc::c_int,
);
}
extern "C" {
pub fn GDALCopyWords(
pSrcData: *const libc::c_void,
eSrcType: GDALDataType::Type,
nSrcPixelOffset: libc::c_int,
pDstData: *mut libc::c_void,
eDstType: GDALDataType::Type,
nDstPixelOffset: libc::c_int,
nWordCount: libc::c_int,
);
}
extern "C" {
pub fn GDALCopyWords64(
pSrcData: *const libc::c_void,
eSrcType: GDALDataType::Type,
nSrcPixelOffset: libc::c_int,
pDstData: *mut libc::c_void,
eDstType: GDALDataType::Type,
nDstPixelOffset: libc::c_int,
nWordCount: GPtrDiff_t,
);
}
extern "C" {
pub fn GDALCopyBits(
pabySrcData: *const GByte,
nSrcOffset: libc::c_int,
nSrcStep: libc::c_int,
pabyDstData: *mut GByte,
nDstOffset: libc::c_int,
nDstStep: libc::c_int,
nBitCount: libc::c_int,
nStepCount: libc::c_int,
);
}
extern "C" {
pub fn GDALLoadWorldFile(arg1: *const libc::c_char, arg2: *mut f64) -> libc::c_int;
}
extern "C" {
pub fn GDALReadWorldFile(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
arg3: *mut f64,
) -> libc::c_int;
}
extern "C" {
pub fn GDALWriteWorldFile(
arg1: *const libc::c_char,
arg2: *const libc::c_char,
arg3: *mut f64,
) -> libc::c_int;
}
extern "C" {
pub fn GDALLoadTabFile(
arg1: *const libc::c_char,
arg2: *mut f64,
arg3: *mut *mut libc::c_char,
arg4: *mut libc::c_int,
arg5: *mut *mut GDAL_GCP,
) -> libc::c_int;
}
extern "C" {
pub fn GDALReadTabFile(
arg1: *const libc::c_char,
arg2: *mut f64,
arg3: *mut *mut libc::c_char,
arg4: *mut libc::c_int,
arg5: *mut *mut GDAL_GCP,
) -> libc::c_int;
}
extern "C" {
pub fn GDALLoadOziMapFile(
arg1: *const libc::c_char,
arg2: *mut f64,
arg3: *mut *mut libc::c_char,
arg4: *mut libc::c_int,
arg5: *mut *mut GDAL_GCP,
) -> libc::c_int;
}
extern "C" {
pub fn GDALReadOziMapFile(
arg1: *const libc::c_char,
arg2: *mut f64,
arg3: *mut *mut libc::c_char,
arg4: *mut libc::c_int,
arg5: *mut *mut GDAL_GCP,
) -> libc::c_int;
}
extern "C" {
pub fn GDALDecToDMS(
arg1: f64,
arg2: *const libc::c_char,
arg3: libc::c_int,
) -> *const libc::c_char;
}
extern "C" {
pub fn GDALPackedDMSToDec(arg1: f64) -> f64;
}
extern "C" {
pub fn GDALDecToPackedDMS(arg1: f64) -> f64;
}
/// Rational Polynomial Coefficient (RPC) metadata, version-1 layout.
/// Mirrors the C `GDALRPCInfoV1` struct; filled by `GDALExtractRPCInfoV1`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALRPCInfoV1 {
    // Offsets and scales used to normalize line/sample/lat/long/height.
    pub dfLINE_OFF: f64,
    pub dfSAMP_OFF: f64,
    pub dfLAT_OFF: f64,
    pub dfLONG_OFF: f64,
    pub dfHEIGHT_OFF: f64,
    pub dfLINE_SCALE: f64,
    pub dfSAMP_SCALE: f64,
    pub dfLAT_SCALE: f64,
    pub dfLONG_SCALE: f64,
    pub dfHEIGHT_SCALE: f64,
    // 20-term numerator/denominator polynomial coefficient arrays.
    pub adfLINE_NUM_COEFF: [f64; 20usize],
    pub adfLINE_DEN_COEFF: [f64; 20usize],
    pub adfSAMP_NUM_COEFF: [f64; 20usize],
    pub adfSAMP_DEN_COEFF: [f64; 20usize],
    // Valid geographic extent of the model.
    pub dfMIN_LONG: f64,
    pub dfMIN_LAT: f64,
    pub dfMAX_LONG: f64,
    pub dfMAX_LAT: f64,
}
/// Verifies that the Rust layout of `GDALRPCInfoV1` (size, alignment and
/// every field offset) matches the C struct layout assumed by the GDAL ABI.
#[test]
fn bindgen_test_layout_GDALRPCInfoV1() {
    assert_eq!(
        ::std::mem::size_of::<GDALRPCInfoV1>(),
        752usize,
        concat!("Size of: ", stringify!(GDALRPCInfoV1))
    );
    assert_eq!(
        ::std::mem::align_of::<GDALRPCInfoV1>(),
        8usize,
        concat!("Alignment of ", stringify!(GDALRPCInfoV1))
    );
    // One check per field: the offset is computed from a null base
    // pointer, exactly as in the expanded bindgen output.
    macro_rules! check_offset {
        ($field:ident, $expected:expr) => {
            assert_eq!(
                unsafe {
                    &(*(::std::ptr::null::<GDALRPCInfoV1>())).$field as *const _ as usize
                },
                $expected,
                concat!(
                    "Offset of field: ",
                    stringify!(GDALRPCInfoV1),
                    "::",
                    stringify!($field)
                )
            );
        };
    }
    check_offset!(dfLINE_OFF, 0usize);
    check_offset!(dfSAMP_OFF, 8usize);
    check_offset!(dfLAT_OFF, 16usize);
    check_offset!(dfLONG_OFF, 24usize);
    check_offset!(dfHEIGHT_OFF, 32usize);
    check_offset!(dfLINE_SCALE, 40usize);
    check_offset!(dfSAMP_SCALE, 48usize);
    check_offset!(dfLAT_SCALE, 56usize);
    check_offset!(dfLONG_SCALE, 64usize);
    check_offset!(dfHEIGHT_SCALE, 72usize);
    check_offset!(adfLINE_NUM_COEFF, 80usize);
    check_offset!(adfLINE_DEN_COEFF, 240usize);
    check_offset!(adfSAMP_NUM_COEFF, 400usize);
    check_offset!(adfSAMP_DEN_COEFF, 560usize);
    check_offset!(dfMIN_LONG, 720usize);
    check_offset!(dfMIN_LAT, 728usize);
    check_offset!(dfMAX_LONG, 736usize);
    check_offset!(dfMAX_LAT, 744usize);
}
/// Rational Polynomial Coefficient (RPC) metadata, version-2 layout:
/// identical to `GDALRPCInfoV1` plus trailing error estimates.
/// Filled by `GDALExtractRPCInfoV2`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALRPCInfoV2 {
    // Offsets and scales used to normalize line/sample/lat/long/height.
    pub dfLINE_OFF: f64,
    pub dfSAMP_OFF: f64,
    pub dfLAT_OFF: f64,
    pub dfLONG_OFF: f64,
    pub dfHEIGHT_OFF: f64,
    pub dfLINE_SCALE: f64,
    pub dfSAMP_SCALE: f64,
    pub dfLAT_SCALE: f64,
    pub dfLONG_SCALE: f64,
    pub dfHEIGHT_SCALE: f64,
    // 20-term numerator/denominator polynomial coefficient arrays.
    pub adfLINE_NUM_COEFF: [f64; 20usize],
    pub adfLINE_DEN_COEFF: [f64; 20usize],
    pub adfSAMP_NUM_COEFF: [f64; 20usize],
    pub adfSAMP_DEN_COEFF: [f64; 20usize],
    // Valid geographic extent of the model.
    pub dfMIN_LONG: f64,
    pub dfMIN_LAT: f64,
    pub dfMAX_LONG: f64,
    pub dfMAX_LAT: f64,
    // V2 additions: bias and random error estimates.
    pub dfERR_BIAS: f64,
    pub dfERR_RAND: f64,
}
/// Verifies that the Rust layout of `GDALRPCInfoV2` (size, alignment and
/// every field offset) matches the C struct layout assumed by the GDAL ABI.
#[test]
fn bindgen_test_layout_GDALRPCInfoV2() {
    assert_eq!(
        ::std::mem::size_of::<GDALRPCInfoV2>(),
        768usize,
        concat!("Size of: ", stringify!(GDALRPCInfoV2))
    );
    assert_eq!(
        ::std::mem::align_of::<GDALRPCInfoV2>(),
        8usize,
        concat!("Alignment of ", stringify!(GDALRPCInfoV2))
    );
    // One check per field: the offset is computed from a null base
    // pointer, exactly as in the expanded bindgen output.
    macro_rules! check_offset {
        ($field:ident, $expected:expr) => {
            assert_eq!(
                unsafe {
                    &(*(::std::ptr::null::<GDALRPCInfoV2>())).$field as *const _ as usize
                },
                $expected,
                concat!(
                    "Offset of field: ",
                    stringify!(GDALRPCInfoV2),
                    "::",
                    stringify!($field)
                )
            );
        };
    }
    check_offset!(dfLINE_OFF, 0usize);
    check_offset!(dfSAMP_OFF, 8usize);
    check_offset!(dfLAT_OFF, 16usize);
    check_offset!(dfLONG_OFF, 24usize);
    check_offset!(dfHEIGHT_OFF, 32usize);
    check_offset!(dfLINE_SCALE, 40usize);
    check_offset!(dfSAMP_SCALE, 48usize);
    check_offset!(dfLAT_SCALE, 56usize);
    check_offset!(dfLONG_SCALE, 64usize);
    check_offset!(dfHEIGHT_SCALE, 72usize);
    check_offset!(adfLINE_NUM_COEFF, 80usize);
    check_offset!(adfLINE_DEN_COEFF, 240usize);
    check_offset!(adfSAMP_NUM_COEFF, 400usize);
    check_offset!(adfSAMP_DEN_COEFF, 560usize);
    check_offset!(dfMIN_LONG, 720usize);
    check_offset!(dfMIN_LAT, 728usize);
    check_offset!(dfMAX_LONG, 736usize);
    check_offset!(dfMAX_LAT, 744usize);
    check_offset!(dfERR_BIAS, 752usize);
    check_offset!(dfERR_RAND, 760usize);
}
extern "C" {
pub fn GDALExtractRPCInfoV1(arg1: CSLConstList, arg2: *mut GDALRPCInfoV1) -> libc::c_int;
}
extern "C" {
pub fn GDALExtractRPCInfoV2(arg1: CSLConstList, arg2: *mut GDALRPCInfoV2) -> libc::c_int;
}
/// One palette entry of a GDAL color table. The meaning of `c1`..`c4`
/// depends on the table's `GDALPaletteInterp` (e.g. RGBA channels, or a
/// single gray value — per GDAL C API).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALColorEntry {
    pub c1: libc::c_short,
    pub c2: libc::c_short,
    pub c3: libc::c_short,
    pub c4: libc::c_short,
}
/// Verifies that the Rust layout of `GDALColorEntry` (size, alignment and
/// every field offset) matches the C struct layout assumed by the GDAL ABI.
#[test]
fn bindgen_test_layout_GDALColorEntry() {
    assert_eq!(
        ::std::mem::size_of::<GDALColorEntry>(),
        8usize,
        concat!("Size of: ", stringify!(GDALColorEntry))
    );
    assert_eq!(
        ::std::mem::align_of::<GDALColorEntry>(),
        2usize,
        concat!("Alignment of ", stringify!(GDALColorEntry))
    );
    // One check per field: the offset is computed from a null base
    // pointer, exactly as in the expanded bindgen output.
    macro_rules! check_offset {
        ($field:ident, $expected:expr) => {
            assert_eq!(
                unsafe {
                    &(*(::std::ptr::null::<GDALColorEntry>())).$field as *const _ as usize
                },
                $expected,
                concat!(
                    "Offset of field: ",
                    stringify!(GDALColorEntry),
                    "::",
                    stringify!($field)
                )
            );
        };
    }
    check_offset!(c1, 0usize);
    check_offset!(c2, 2usize);
    check_offset!(c3, 4usize);
    check_offset!(c4, 6usize);
}
// --- Color table API (bindgen-generated FFI declarations) ---
// Lifecycle (create/destroy/clone) and entry access for GDALColorTableH
// handles. Ownership and null-return conventions are those of the GDAL C
// library — confirm against the GDAL C API docs before relying on them.
extern "C" {
    pub fn GDALCreateColorTable(arg1: GDALPaletteInterp::Type) -> GDALColorTableH;
}
extern "C" {
    pub fn GDALDestroyColorTable(arg1: GDALColorTableH);
}
extern "C" {
    pub fn GDALCloneColorTable(arg1: GDALColorTableH) -> GDALColorTableH;
}
extern "C" {
    pub fn GDALGetPaletteInterpretation(arg1: GDALColorTableH) -> GDALPaletteInterp::Type;
}
extern "C" {
    pub fn GDALGetColorEntryCount(arg1: GDALColorTableH) -> libc::c_int;
}
extern "C" {
    pub fn GDALGetColorEntry(arg1: GDALColorTableH, arg2: libc::c_int) -> *const GDALColorEntry;
}
extern "C" {
    pub fn GDALGetColorEntryAsRGB(
        arg1: GDALColorTableH,
        arg2: libc::c_int,
        arg3: *mut GDALColorEntry,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALSetColorEntry(arg1: GDALColorTableH, arg2: libc::c_int, arg3: *const GDALColorEntry);
}
extern "C" {
    pub fn GDALCreateColorRamp(
        hTable: GDALColorTableH,
        nStartIndex: libc::c_int,
        psStartColor: *const GDALColorEntry,
        nEndIndex: libc::c_int,
        psEndColor: *const GDALColorEntry,
    );
}
// C enums for the raster attribute table (RAT) API, rendered by bindgen in
// "module consts" style: each module is one C enum, `Type` is the underlying
// integer type and the consts are the variants. This representation is used
// instead of a Rust enum so that unknown values coming from C are not UB.

/// Data type of a RAT column.
pub mod GDALRATFieldType {
    pub type Type = libc::c_uint;
    pub const GFT_Integer: Type = 0;
    pub const GFT_Real: Type = 1;
    pub const GFT_String: Type = 2;
}
/// Semantic usage/role of a RAT column (generic, pixel count, color
/// components, min/max bounds, ...). GFU_MaxCount is the number of usages,
/// not a real usage.
pub mod GDALRATFieldUsage {
    pub type Type = libc::c_uint;
    pub const GFU_Generic: Type = 0;
    pub const GFU_PixelCount: Type = 1;
    pub const GFU_Name: Type = 2;
    pub const GFU_Min: Type = 3;
    pub const GFU_Max: Type = 4;
    pub const GFU_MinMax: Type = 5;
    pub const GFU_Red: Type = 6;
    pub const GFU_Green: Type = 7;
    pub const GFU_Blue: Type = 8;
    pub const GFU_Alpha: Type = 9;
    pub const GFU_RedMin: Type = 10;
    pub const GFU_GreenMin: Type = 11;
    pub const GFU_BlueMin: Type = 12;
    pub const GFU_AlphaMin: Type = 13;
    pub const GFU_RedMax: Type = 14;
    pub const GFU_GreenMax: Type = 15;
    pub const GFU_BlueMax: Type = 16;
    pub const GFU_AlphaMax: Type = 17;
    pub const GFU_MaxCount: Type = 18;
}
/// Whether a RAT is thematic or athematic.
pub mod GDALRATTableType {
    pub type Type = libc::c_uint;
    pub const GRTT_THEMATIC: Type = 0;
    pub const GRTT_ATHEMATIC: Type = 1;
}
// --- Raster attribute table (RAT) API (bindgen-generated FFI) ---
// Row/column introspection, typed cell get/set, bulk I/O, linear binning,
// and conversion to/from color tables for GDALRasterAttributeTableH handles.
// In the bulk *ValuesIO* functions the row/column/value conventions are
// those of the GDAL C library — confirm against the GDAL C API docs.
extern "C" {
    pub fn GDALCreateRasterAttributeTable() -> GDALRasterAttributeTableH;
}
extern "C" {
    pub fn GDALDestroyRasterAttributeTable(arg1: GDALRasterAttributeTableH);
}
extern "C" {
    pub fn GDALRATGetColumnCount(arg1: GDALRasterAttributeTableH) -> libc::c_int;
}
extern "C" {
    pub fn GDALRATGetNameOfCol(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
    ) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALRATGetUsageOfCol(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
    ) -> GDALRATFieldUsage::Type;
}
extern "C" {
    pub fn GDALRATGetTypeOfCol(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
    ) -> GDALRATFieldType::Type;
}
extern "C" {
    pub fn GDALRATGetColOfUsage(
        arg1: GDALRasterAttributeTableH,
        arg2: GDALRATFieldUsage::Type,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALRATGetRowCount(arg1: GDALRasterAttributeTableH) -> libc::c_int;
}
// Cell accessors: (table, row, column) -> value, in the three RAT field types.
extern "C" {
    pub fn GDALRATGetValueAsString(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
        arg3: libc::c_int,
    ) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALRATGetValueAsInt(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
        arg3: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALRATGetValueAsDouble(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
        arg3: libc::c_int,
    ) -> f64;
}
extern "C" {
    pub fn GDALRATSetValueAsString(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
        arg3: libc::c_int,
        arg4: *const libc::c_char,
    );
}
extern "C" {
    pub fn GDALRATSetValueAsInt(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
        arg3: libc::c_int,
        arg4: libc::c_int,
    );
}
extern "C" {
    pub fn GDALRATSetValueAsDouble(
        arg1: GDALRasterAttributeTableH,
        arg2: libc::c_int,
        arg3: libc::c_int,
        arg4: f64,
    );
}
extern "C" {
    pub fn GDALRATChangesAreWrittenToFile(hRAT: GDALRasterAttributeTableH) -> libc::c_int;
}
// Bulk column I/O; eRWFlag selects read vs. write direction.
extern "C" {
    pub fn GDALRATValuesIOAsDouble(
        hRAT: GDALRasterAttributeTableH,
        eRWFlag: GDALRWFlag::Type,
        iField: libc::c_int,
        iStartRow: libc::c_int,
        iLength: libc::c_int,
        pdfData: *mut f64,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALRATValuesIOAsInteger(
        hRAT: GDALRasterAttributeTableH,
        eRWFlag: GDALRWFlag::Type,
        iField: libc::c_int,
        iStartRow: libc::c_int,
        iLength: libc::c_int,
        pnData: *mut libc::c_int,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALRATValuesIOAsString(
        hRAT: GDALRasterAttributeTableH,
        eRWFlag: GDALRWFlag::Type,
        iField: libc::c_int,
        iStartRow: libc::c_int,
        iLength: libc::c_int,
        papszStrList: CSLConstList,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALRATSetRowCount(arg1: GDALRasterAttributeTableH, arg2: libc::c_int);
}
extern "C" {
    pub fn GDALRATCreateColumn(
        arg1: GDALRasterAttributeTableH,
        arg2: *const libc::c_char,
        arg3: GDALRATFieldType::Type,
        arg4: GDALRATFieldUsage::Type,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALRATSetLinearBinning(
        arg1: GDALRasterAttributeTableH,
        arg2: f64,
        arg3: f64,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALRATGetLinearBinning(
        arg1: GDALRasterAttributeTableH,
        arg2: *mut f64,
        arg3: *mut f64,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALRATSetTableType(
        hRAT: GDALRasterAttributeTableH,
        eInTableType: GDALRATTableType::Type,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALRATGetTableType(hRAT: GDALRasterAttributeTableH) -> GDALRATTableType::Type;
}
extern "C" {
    pub fn GDALRATInitializeFromColorTable(
        arg1: GDALRasterAttributeTableH,
        arg2: GDALColorTableH,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALRATTranslateToColorTable(
        arg1: GDALRasterAttributeTableH,
        nEntryCount: libc::c_int,
    ) -> GDALColorTableH;
}
extern "C" {
    pub fn GDALRATDumpReadable(arg1: GDALRasterAttributeTableH, arg2: *mut FILE);
}
extern "C" {
    pub fn GDALRATClone(arg1: GDALRasterAttributeTableH) -> GDALRasterAttributeTableH;
}
extern "C" {
    pub fn GDALRATSerializeJSON(arg1: GDALRasterAttributeTableH) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALRATGetRowOfValue(arg1: GDALRasterAttributeTableH, arg2: f64) -> libc::c_int;
}
extern "C" {
    pub fn GDALRATRemoveStatistics(arg1: GDALRasterAttributeTableH);
}
// --- Raster block cache controls (bindgen-generated FFI) ---
// The `...64` variants take/return GIntBig and exist alongside the c_int
// versions, presumably for cache sizes above INT_MAX — confirm against the
// GDAL C API docs.
extern "C" {
    pub fn GDALSetCacheMax(nBytes: libc::c_int);
}
extern "C" {
    pub fn GDALGetCacheMax() -> libc::c_int;
}
extern "C" {
    pub fn GDALGetCacheUsed() -> libc::c_int;
}
extern "C" {
    pub fn GDALSetCacheMax64(nBytes: GIntBig);
}
extern "C" {
    pub fn GDALGetCacheMax64() -> GIntBig;
}
extern "C" {
    pub fn GDALGetCacheUsed64() -> GIntBig;
}
extern "C" {
    pub fn GDALFlushCacheBlock() -> libc::c_int;
}
// --- Virtual-memory raster views (bindgen-generated FFI) ---
// Map dataset/band pixel windows into CPLVirtualMem regions. Pixel/line/band
// spacing, cache and page-size hints follow the GDAL C conventions for these
// entry points — confirm exact semantics against the GDAL C API docs.
extern "C" {
    pub fn GDALDatasetGetVirtualMem(
        hDS: GDALDatasetH,
        eRWFlag: GDALRWFlag::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        nBufXSize: libc::c_int,
        nBufYSize: libc::c_int,
        eBufType: GDALDataType::Type,
        nBandCount: libc::c_int,
        panBandMap: *mut libc::c_int,
        nPixelSpace: libc::c_int,
        nLineSpace: GIntBig,
        nBandSpace: GIntBig,
        nCacheSize: usize,
        nPageSizeHint: usize,
        bSingleThreadUsage: libc::c_int,
        papszOptions: CSLConstList,
    ) -> *mut CPLVirtualMem;
}
extern "C" {
    pub fn GDALRasterBandGetVirtualMem(
        hBand: GDALRasterBandH,
        eRWFlag: GDALRWFlag::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        nBufXSize: libc::c_int,
        nBufYSize: libc::c_int,
        eBufType: GDALDataType::Type,
        nPixelSpace: libc::c_int,
        nLineSpace: GIntBig,
        nCacheSize: usize,
        nPageSizeHint: usize,
        bSingleThreadUsage: libc::c_int,
        papszOptions: CSLConstList,
    ) -> *mut CPLVirtualMem;
}
extern "C" {
    // Outputs the pixel/line spacings chosen by GDAL through the two out
    // pointers.
    pub fn GDALGetVirtualMemAuto(
        hBand: GDALRasterBandH,
        eRWFlag: GDALRWFlag::Type,
        pnPixelSpace: *mut libc::c_int,
        pnLineSpace: *mut GIntBig,
        papszOptions: CSLConstList,
    ) -> *mut CPLVirtualMem;
}
/// C enum (bindgen "module consts" style) selecting the tile memory layout
/// for the tiled virtual-memory functions below (TIP/BIT/BSQ orderings).
pub mod GDALTileOrganization {
    pub type Type = libc::c_uint;
    pub const GTO_TIP: Type = 0;
    pub const GTO_BIT: Type = 1;
    pub const GTO_BSQ: Type = 2;
}
// --- Tiled virtual-memory views and misc dataset constructors ---
// Bindgen-generated FFI for tile-organized virtual memory mappings, the
// pansharpened-VRT constructor, and JPEG2000 structure introspection.
extern "C" {
    pub fn GDALDatasetGetTiledVirtualMem(
        hDS: GDALDatasetH,
        eRWFlag: GDALRWFlag::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        nTileXSize: libc::c_int,
        nTileYSize: libc::c_int,
        eBufType: GDALDataType::Type,
        nBandCount: libc::c_int,
        panBandMap: *mut libc::c_int,
        eTileOrganization: GDALTileOrganization::Type,
        nCacheSize: usize,
        bSingleThreadUsage: libc::c_int,
        papszOptions: CSLConstList,
    ) -> *mut CPLVirtualMem;
}
extern "C" {
    pub fn GDALRasterBandGetTiledVirtualMem(
        hBand: GDALRasterBandH,
        eRWFlag: GDALRWFlag::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        nTileXSize: libc::c_int,
        nTileYSize: libc::c_int,
        eBufType: GDALDataType::Type,
        nCacheSize: usize,
        bSingleThreadUsage: libc::c_int,
        papszOptions: CSLConstList,
    ) -> *mut CPLVirtualMem;
}
extern "C" {
    pub fn GDALCreatePansharpenedVRT(
        pszXML: *const libc::c_char,
        hPanchroBand: GDALRasterBandH,
        nInputSpectralBands: libc::c_int,
        pahInputSpectralBands: *mut GDALRasterBandH,
    ) -> GDALDatasetH;
}
extern "C" {
    pub fn GDALGetJPEG2000Structure(
        pszFilename: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> *mut CPLXMLNode;
}
// --- Multidimensional API: extended data types (bindgen-generated FFI) ---
// Constructors, accessors and comparisons for GDALExtendedDataTypeH, plus
// compound-type components (GDALEDTComponentH). Handles returned by the
// Create*/Get* functions must be released with the corresponding *Release
// function, per the naming pattern — confirm ownership rules in the GDAL
// C API docs.
extern "C" {
    pub fn GDALCreateMultiDimensional(
        hDriver: GDALDriverH,
        pszName: *const libc::c_char,
        papszRootGroupOptions: CSLConstList,
        papszOptions: CSLConstList,
    ) -> GDALDatasetH;
}
extern "C" {
    pub fn GDALExtendedDataTypeCreate(eType: GDALDataType::Type) -> GDALExtendedDataTypeH;
}
extern "C" {
    pub fn GDALExtendedDataTypeCreateString(nMaxStringLength: usize) -> GDALExtendedDataTypeH;
}
extern "C" {
    pub fn GDALExtendedDataTypeCreateStringEx(
        nMaxStringLength: usize,
        eSubType: GDALExtendedDataTypeSubType::Type,
    ) -> GDALExtendedDataTypeH;
}
extern "C" {
    pub fn GDALExtendedDataTypeCreateCompound(
        pszName: *const libc::c_char,
        nTotalSize: usize,
        nComponents: usize,
        comps: *const GDALEDTComponentH,
    ) -> GDALExtendedDataTypeH;
}
extern "C" {
    pub fn GDALExtendedDataTypeRelease(hEDT: GDALExtendedDataTypeH);
}
extern "C" {
    pub fn GDALExtendedDataTypeGetName(hEDT: GDALExtendedDataTypeH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALExtendedDataTypeGetClass(
        hEDT: GDALExtendedDataTypeH,
    ) -> GDALExtendedDataTypeClass::Type;
}
extern "C" {
    pub fn GDALExtendedDataTypeGetNumericDataType(
        hEDT: GDALExtendedDataTypeH,
    ) -> GDALDataType::Type;
}
extern "C" {
    pub fn GDALExtendedDataTypeGetSize(hEDT: GDALExtendedDataTypeH) -> usize;
}
extern "C" {
    pub fn GDALExtendedDataTypeGetMaxStringLength(hEDT: GDALExtendedDataTypeH) -> usize;
}
extern "C" {
    // Returns a heap array of nCount components; free it with
    // GDALExtendedDataTypeFreeComponents below.
    pub fn GDALExtendedDataTypeGetComponents(
        hEDT: GDALExtendedDataTypeH,
        pnCount: *mut usize,
    ) -> *mut GDALEDTComponentH;
}
extern "C" {
    pub fn GDALExtendedDataTypeFreeComponents(components: *mut GDALEDTComponentH, nCount: usize);
}
extern "C" {
    pub fn GDALExtendedDataTypeCanConvertTo(
        hSourceEDT: GDALExtendedDataTypeH,
        hTargetEDT: GDALExtendedDataTypeH,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALExtendedDataTypeEquals(
        hFirstEDT: GDALExtendedDataTypeH,
        hSecondEDT: GDALExtendedDataTypeH,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALExtendedDataTypeGetSubType(
        hEDT: GDALExtendedDataTypeH,
    ) -> GDALExtendedDataTypeSubType::Type;
}
extern "C" {
    pub fn GDALEDTComponentCreate(
        pszName: *const libc::c_char,
        nOffset: usize,
        hType: GDALExtendedDataTypeH,
    ) -> GDALEDTComponentH;
}
extern "C" {
    pub fn GDALEDTComponentRelease(hComp: GDALEDTComponentH);
}
extern "C" {
    pub fn GDALEDTComponentGetName(hComp: GDALEDTComponentH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALEDTComponentGetOffset(hComp: GDALEDTComponentH) -> usize;
}
extern "C" {
    pub fn GDALEDTComponentGetType(hComp: GDALEDTComponentH) -> GDALExtendedDataTypeH;
}
// --- Multidimensional API: groups (bindgen-generated FFI) ---
// Navigation and creation within the GDALGroupH hierarchy: MDArrays,
// subgroups, vector layers, dimensions and attributes. The *GetNames
// functions return C string lists (char**); the *Get(Dimensions|Attributes)
// functions return heap arrays sized via pnCount — release per the GDAL C
// API conventions.
extern "C" {
    pub fn GDALDatasetGetRootGroup(hDS: GDALDatasetH) -> GDALGroupH;
}
extern "C" {
    pub fn GDALGroupRelease(hGroup: GDALGroupH);
}
extern "C" {
    pub fn GDALGroupGetName(hGroup: GDALGroupH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALGroupGetFullName(hGroup: GDALGroupH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALGroupGetMDArrayNames(
        hGroup: GDALGroupH,
        papszOptions: CSLConstList,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn GDALGroupOpenMDArray(
        hGroup: GDALGroupH,
        pszMDArrayName: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALGroupOpenMDArrayFromFullname(
        hGroup: GDALGroupH,
        pszMDArrayName: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALGroupResolveMDArray(
        hGroup: GDALGroupH,
        pszName: *const libc::c_char,
        pszStartingPoint: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALGroupGetGroupNames(
        hGroup: GDALGroupH,
        papszOptions: CSLConstList,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn GDALGroupOpenGroup(
        hGroup: GDALGroupH,
        pszSubGroupName: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> GDALGroupH;
}
extern "C" {
    // NOTE: parameter is named pszMDArrayName in the generated binding even
    // though this opens a group; name comes from the upstream C header.
    pub fn GDALGroupOpenGroupFromFullname(
        hGroup: GDALGroupH,
        pszMDArrayName: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> GDALGroupH;
}
extern "C" {
    pub fn GDALGroupGetVectorLayerNames(
        hGroup: GDALGroupH,
        papszOptions: CSLConstList,
    ) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn GDALGroupOpenVectorLayer(
        hGroup: GDALGroupH,
        pszVectorLayerName: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> OGRLayerH;
}
extern "C" {
    pub fn GDALGroupGetDimensions(
        hGroup: GDALGroupH,
        pnCount: *mut usize,
        papszOptions: CSLConstList,
    ) -> *mut GDALDimensionH;
}
extern "C" {
    pub fn GDALGroupGetAttribute(
        hGroup: GDALGroupH,
        pszName: *const libc::c_char,
    ) -> GDALAttributeH;
}
extern "C" {
    pub fn GDALGroupGetAttributes(
        hGroup: GDALGroupH,
        pnCount: *mut usize,
        papszOptions: CSLConstList,
    ) -> *mut GDALAttributeH;
}
extern "C" {
    pub fn GDALGroupGetStructuralInfo(hGroup: GDALGroupH) -> CSLConstList;
}
extern "C" {
    pub fn GDALGroupCreateGroup(
        hGroup: GDALGroupH,
        pszSubGroupName: *const libc::c_char,
        papszOptions: CSLConstList,
    ) -> GDALGroupH;
}
extern "C" {
    pub fn GDALGroupCreateDimension(
        hGroup: GDALGroupH,
        pszName: *const libc::c_char,
        pszType: *const libc::c_char,
        pszDirection: *const libc::c_char,
        nSize: GUInt64,
        papszOptions: CSLConstList,
    ) -> GDALDimensionH;
}
extern "C" {
    pub fn GDALGroupCreateMDArray(
        hGroup: GDALGroupH,
        pszName: *const libc::c_char,
        nDimensions: usize,
        pahDimensions: *mut GDALDimensionH,
        hEDT: GDALExtendedDataTypeH,
        papszOptions: CSLConstList,
    ) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALGroupCreateAttribute(
        hGroup: GDALGroupH,
        pszName: *const libc::c_char,
        nDimensions: usize,
        panDimensions: *const GUInt64,
        hEDT: GDALExtendedDataTypeH,
        papszOptions: CSLConstList,
    ) -> GDALAttributeH;
}
// --- Multidimensional API: MDArray basic accessors (bindgen-generated FFI) ---
extern "C" {
    pub fn GDALMDArrayRelease(hMDArray: GDALMDArrayH);
}
extern "C" {
    pub fn GDALMDArrayGetName(hArray: GDALMDArrayH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALMDArrayGetFullName(hArray: GDALMDArrayH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALMDArrayGetTotalElementsCount(hArray: GDALMDArrayH) -> GUInt64;
}
extern "C" {
    pub fn GDALMDArrayGetDimensionCount(hArray: GDALMDArrayH) -> usize;
}
extern "C" {
    // Returns a heap array of pnCount dimension handles; release with
    // GDALReleaseDimensions (declared further below in this file).
    pub fn GDALMDArrayGetDimensions(
        hArray: GDALMDArrayH,
        pnCount: *mut usize,
    ) -> *mut GDALDimensionH;
}
extern "C" {
    pub fn GDALMDArrayGetDataType(hArray: GDALMDArrayH) -> GDALExtendedDataTypeH;
}
extern "C" {
    /// FFI declaration for `GDALMDArrayRead` (reads a hyperslab of an
    /// MDArray into `pDstBuffer`). Returns a C int — presumably nonzero on
    /// success; confirm against the GDAL C API docs.
    pub fn GDALMDArrayRead(
        hArray: GDALMDArrayH,
        arrayStartIdx: *const GUInt64,
        count: *const usize,
        arrayStep: *const GInt64,
        bufferStride: *const GPtrDiff_t,
        bufferDatatype: GDALExtendedDataTypeH,
        pDstBuffer: *mut libc::c_void,
        pDstBufferAllocStart: *const libc::c_void,
        // Renamed from the garbled `nDstBufferllocSize` to agree with
        // `pDstBufferAllocStart` above. Parameter names in an `extern`
        // declaration are cosmetic: linkage is by symbol name only, so the
        // ABI is unchanged.
        nDstBufferAllocSize: usize,
    ) -> libc::c_int;
}
extern "C" {
    /// FFI declaration for `GDALMDArrayWrite` (writes a hyperslab of an
    /// MDArray from `pSrcBuffer`). Returns a C int — presumably nonzero on
    /// success; confirm against the GDAL C API docs.
    pub fn GDALMDArrayWrite(
        hArray: GDALMDArrayH,
        arrayStartIdx: *const GUInt64,
        count: *const usize,
        arrayStep: *const GInt64,
        bufferStride: *const GPtrDiff_t,
        bufferDatatype: GDALExtendedDataTypeH,
        pSrcBuffer: *const libc::c_void,
        // Renamed from `psrcBufferAllocStart` / `nSrcBufferllocSize` to
        // follow the pSrc/nSrc Hungarian convention used by the sibling
        // GDALMDArrayRead declaration. Extern parameter names are cosmetic
        // in Rust FFI (linkage is by symbol name), so the ABI is unchanged.
        pSrcBufferAllocStart: *const libc::c_void,
        nSrcBufferAllocSize: usize,
    ) -> libc::c_int;
}
// --- Multidimensional API: remaining MDArray operations ---
// Bindgen-generated FFI: read-advise hints, attributes, nodata, scale/offset,
// block size, unit/SRS, derived views (view expression, transpose, unscaled,
// mask, resampled), classic-dataset adaptation, and statistics. Functions
// returning heap arrays size them through the pnCount out-parameter; release
// per the matching GDALRelease* function.
extern "C" {
    pub fn GDALMDArrayAdviseRead(
        hArray: GDALMDArrayH,
        arrayStartIdx: *const GUInt64,
        count: *const usize,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArrayAdviseReadEx(
        hArray: GDALMDArrayH,
        arrayStartIdx: *const GUInt64,
        count: *const usize,
        papszOptions: CSLConstList,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArrayGetAttribute(
        hArray: GDALMDArrayH,
        pszName: *const libc::c_char,
    ) -> GDALAttributeH;
}
extern "C" {
    pub fn GDALMDArrayGetAttributes(
        hArray: GDALMDArrayH,
        pnCount: *mut usize,
        papszOptions: CSLConstList,
    ) -> *mut GDALAttributeH;
}
extern "C" {
    pub fn GDALMDArrayCreateAttribute(
        hArray: GDALMDArrayH,
        pszName: *const libc::c_char,
        nDimensions: usize,
        panDimensions: *const GUInt64,
        hEDT: GDALExtendedDataTypeH,
        papszOptions: CSLConstList,
    ) -> GDALAttributeH;
}
// Nodata handling: raw (type-punned) and double-typed variants.
extern "C" {
    pub fn GDALMDArrayGetRawNoDataValue(hArray: GDALMDArrayH) -> *const libc::c_void;
}
extern "C" {
    pub fn GDALMDArrayGetNoDataValueAsDouble(
        hArray: GDALMDArrayH,
        pbHasNoDataValue: *mut libc::c_int,
    ) -> f64;
}
extern "C" {
    pub fn GDALMDArraySetRawNoDataValue(
        hArray: GDALMDArrayH,
        arg1: *const libc::c_void,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArraySetNoDataValueAsDouble(
        hArray: GDALMDArrayH,
        dfNoDataValue: f64,
    ) -> libc::c_int;
}
// Scale/offset metadata; the *Ex variants additionally carry the storage
// data type.
extern "C" {
    pub fn GDALMDArraySetScale(hArray: GDALMDArrayH, dfScale: f64) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArraySetScaleEx(
        hArray: GDALMDArrayH,
        dfScale: f64,
        eStorageType: GDALDataType::Type,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArrayGetScale(hArray: GDALMDArrayH, pbHasValue: *mut libc::c_int) -> f64;
}
extern "C" {
    pub fn GDALMDArrayGetScaleEx(
        hArray: GDALMDArrayH,
        pbHasValue: *mut libc::c_int,
        peStorageType: *mut GDALDataType::Type,
    ) -> f64;
}
extern "C" {
    pub fn GDALMDArraySetOffset(hArray: GDALMDArrayH, dfOffset: f64) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArraySetOffsetEx(
        hArray: GDALMDArrayH,
        dfOffset: f64,
        eStorageType: GDALDataType::Type,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArrayGetOffset(hArray: GDALMDArrayH, pbHasValue: *mut libc::c_int) -> f64;
}
extern "C" {
    pub fn GDALMDArrayGetOffsetEx(
        hArray: GDALMDArrayH,
        pbHasValue: *mut libc::c_int,
        peStorageType: *mut GDALDataType::Type,
    ) -> f64;
}
extern "C" {
    pub fn GDALMDArrayGetBlockSize(hArray: GDALMDArrayH, pnCount: *mut usize) -> *mut GUInt64;
}
extern "C" {
    pub fn GDALMDArraySetUnit(hArray: GDALMDArrayH, arg1: *const libc::c_char) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArrayGetUnit(hArray: GDALMDArrayH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALMDArraySetSpatialRef(arg1: GDALMDArrayH, arg2: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArrayGetSpatialRef(hArray: GDALMDArrayH) -> OGRSpatialReferenceH;
}
extern "C" {
    pub fn GDALMDArrayGetProcessingChunkSize(
        hArray: GDALMDArrayH,
        pnCount: *mut usize,
        nMaxChunkMemory: usize,
    ) -> *mut usize;
}
extern "C" {
    pub fn GDALMDArrayGetStructuralInfo(hArray: GDALMDArrayH) -> CSLConstList;
}
// Derived-view constructors: each returns a new GDALMDArrayH handle.
extern "C" {
    pub fn GDALMDArrayGetView(
        hArray: GDALMDArrayH,
        pszViewExpr: *const libc::c_char,
    ) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALMDArrayTranspose(
        hArray: GDALMDArrayH,
        nNewAxisCount: usize,
        panMapNewAxisToOldAxis: *const libc::c_int,
    ) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALMDArrayGetUnscaled(hArray: GDALMDArrayH) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALMDArrayGetMask(hArray: GDALMDArrayH, papszOptions: CSLConstList) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALMDArrayAsClassicDataset(
        hArray: GDALMDArrayH,
        iXDim: usize,
        iYDim: usize,
    ) -> GDALDatasetH;
}
extern "C" {
    pub fn GDALMDArrayGetStatistics(
        hArray: GDALMDArrayH,
        arg1: GDALDatasetH,
        bApproxOK: libc::c_int,
        bForce: libc::c_int,
        pdfMin: *mut f64,
        pdfMax: *mut f64,
        pdfMean: *mut f64,
        pdfStdDev: *mut f64,
        pnValidCount: *mut GUInt64,
        pfnProgress: GDALProgressFunc,
        pProgressData: *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALMDArrayComputeStatistics(
        hArray: GDALMDArrayH,
        arg1: GDALDatasetH,
        bApproxOK: libc::c_int,
        pdfMin: *mut f64,
        pdfMax: *mut f64,
        pdfMean: *mut f64,
        pdfStdDev: *mut f64,
        pnValidCount: *mut GUInt64,
        arg2: GDALProgressFunc,
        pProgressData: *mut libc::c_void,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALMDArrayGetResampled(
        hArray: GDALMDArrayH,
        nNewDimCount: usize,
        pahNewDims: *const GDALDimensionH,
        resampleAlg: GDALRIOResampleAlg::Type,
        hTargetSRS: OGRSpatialReferenceH,
        papszOptions: CSLConstList,
    ) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALMDArrayGetCoordinateVariables(
        hArray: GDALMDArrayH,
        pnCount: *mut usize,
    ) -> *mut GDALMDArrayH;
}
extern "C" {
    pub fn GDALReleaseArrays(arrays: *mut GDALMDArrayH, nCount: usize);
}
extern "C" {
    pub fn GDALMDArrayCache(hArray: GDALMDArrayH, papszOptions: CSLConstList) -> libc::c_int;
}
// --- Multidimensional API: attributes (bindgen-generated FFI) ---
// Introspection, typed reads (raw/string/int/double and array forms) and
// typed writes for GDALAttributeH handles. Raw reads are paired with
// GDALAttributeFreeRawResult for deallocation.
extern "C" {
    pub fn GDALAttributeRelease(hAttr: GDALAttributeH);
}
extern "C" {
    pub fn GDALReleaseAttributes(attributes: *mut GDALAttributeH, nCount: usize);
}
extern "C" {
    pub fn GDALAttributeGetName(hAttr: GDALAttributeH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALAttributeGetFullName(hAttr: GDALAttributeH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALAttributeGetTotalElementsCount(hAttr: GDALAttributeH) -> GUInt64;
}
extern "C" {
    pub fn GDALAttributeGetDimensionCount(hAttr: GDALAttributeH) -> usize;
}
extern "C" {
    pub fn GDALAttributeGetDimensionsSize(
        hAttr: GDALAttributeH,
        pnCount: *mut usize,
    ) -> *mut GUInt64;
}
extern "C" {
    pub fn GDALAttributeGetDataType(hAttr: GDALAttributeH) -> GDALExtendedDataTypeH;
}
extern "C" {
    pub fn GDALAttributeReadAsRaw(hAttr: GDALAttributeH, pnSize: *mut usize) -> *mut GByte;
}
extern "C" {
    pub fn GDALAttributeFreeRawResult(hAttr: GDALAttributeH, raw: *mut GByte, nSize: usize);
}
extern "C" {
    pub fn GDALAttributeReadAsString(hAttr: GDALAttributeH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALAttributeReadAsInt(hAttr: GDALAttributeH) -> libc::c_int;
}
extern "C" {
    pub fn GDALAttributeReadAsDouble(hAttr: GDALAttributeH) -> f64;
}
extern "C" {
    pub fn GDALAttributeReadAsStringArray(hAttr: GDALAttributeH) -> *mut *mut libc::c_char;
}
extern "C" {
    pub fn GDALAttributeReadAsIntArray(
        hAttr: GDALAttributeH,
        pnCount: *mut usize,
    ) -> *mut libc::c_int;
}
extern "C" {
    pub fn GDALAttributeReadAsDoubleArray(hAttr: GDALAttributeH, pnCount: *mut usize) -> *mut f64;
}
extern "C" {
    pub fn GDALAttributeWriteRaw(
        hAttr: GDALAttributeH,
        arg1: *const libc::c_void,
        arg2: usize,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALAttributeWriteString(
        hAttr: GDALAttributeH,
        arg1: *const libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALAttributeWriteStringArray(hAttr: GDALAttributeH, arg1: CSLConstList) -> libc::c_int;
}
extern "C" {
    pub fn GDALAttributeWriteInt(hAttr: GDALAttributeH, arg1: libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn GDALAttributeWriteDouble(hAttr: GDALAttributeH, arg1: f64) -> libc::c_int;
}
extern "C" {
    pub fn GDALAttributeWriteDoubleArray(
        hAttr: GDALAttributeH,
        arg1: *const f64,
        arg2: usize,
    ) -> libc::c_int;
}
// --- Multidimensional API: dimensions (bindgen-generated FFI) ---
// Accessors for GDALDimensionH handles (name, type, direction, size) and
// the indexing-variable link between a dimension and an MDArray.
extern "C" {
    pub fn GDALDimensionRelease(hDim: GDALDimensionH);
}
extern "C" {
    pub fn GDALReleaseDimensions(dims: *mut GDALDimensionH, nCount: usize);
}
extern "C" {
    pub fn GDALDimensionGetName(hDim: GDALDimensionH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALDimensionGetFullName(hDim: GDALDimensionH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALDimensionGetType(hDim: GDALDimensionH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALDimensionGetDirection(hDim: GDALDimensionH) -> *const libc::c_char;
}
extern "C" {
    pub fn GDALDimensionGetSize(hDim: GDALDimensionH) -> GUInt64;
}
extern "C" {
    pub fn GDALDimensionGetIndexingVariable(hDim: GDALDimensionH) -> GDALMDArrayH;
}
extern "C" {
    pub fn GDALDimensionSetIndexingVariable(
        hDim: GDALDimensionH,
        hArray: GDALMDArrayH,
    ) -> libc::c_int;
}
// --- Raster algorithms (bindgen-generated FFI from gdal_alg.h) ---
// Median-cut palette computation, dithering, checksums, proximity,
// nodata fill, polygonization (int and float variants) and sieve filtering.
// The pfnProgress/pProgressArg pairs are GDAL's standard progress-callback
// convention.
extern "C" {
    pub fn GDALComputeMedianCutPCT(
        hRed: GDALRasterBandH,
        hGreen: GDALRasterBandH,
        hBlue: GDALRasterBandH,
        // Optional per-pixel filter callback; null presumably means "include
        // all pixels" — confirm against the GDAL C API docs.
        pfnIncludePixel: ::std::option::Option<
            unsafe extern "C" fn(
                arg1: libc::c_int,
                arg2: libc::c_int,
                arg3: *mut libc::c_void,
            ) -> libc::c_int,
        >,
        nColors: libc::c_int,
        hColorTable: GDALColorTableH,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALDitherRGB2PCT(
        hRed: GDALRasterBandH,
        hGreen: GDALRasterBandH,
        hBlue: GDALRasterBandH,
        hTarget: GDALRasterBandH,
        hColorTable: GDALColorTableH,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALChecksumImage(
        hBand: GDALRasterBandH,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALComputeProximity(
        hSrcBand: GDALRasterBandH,
        hProximityBand: GDALRasterBandH,
        papszOptions: *mut *mut libc::c_char,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALFillNodata(
        hTargetBand: GDALRasterBandH,
        hMaskBand: GDALRasterBandH,
        dfMaxSearchDist: f64,
        bDeprecatedOption: libc::c_int,
        nSmoothingIterations: libc::c_int,
        papszOptions: *mut *mut libc::c_char,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALPolygonize(
        hSrcBand: GDALRasterBandH,
        hMaskBand: GDALRasterBandH,
        hOutLayer: OGRLayerH,
        iPixValField: libc::c_int,
        papszOptions: *mut *mut libc::c_char,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    // Same signature as GDALPolygonize; the F-prefixed variant presumably
    // operates on floating-point pixel values — confirm in the GDAL docs.
    pub fn GDALFPolygonize(
        hSrcBand: GDALRasterBandH,
        hMaskBand: GDALRasterBandH,
        hOutLayer: OGRLayerH,
        iPixValField: libc::c_int,
        papszOptions: *mut *mut libc::c_char,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALSieveFilter(
        hSrcBand: GDALRasterBandH,
        hMaskBand: GDALRasterBandH,
        hDstBand: GDALRasterBandH,
        nSizeThreshold: libc::c_int,
        nConnectedness: libc::c_int,
        papszOptions: *mut *mut libc::c_char,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
    ) -> CPLErr::Type;
}
/// C callback type shared by every transformer entry point below: maps
/// nPointCount (x, y, z) coordinate triples in place, with bDstToSrc
/// selecting the direction and panSuccess receiving a per-point flag.
/// `Option<...>` is the standard bindgen encoding of a nullable C function
/// pointer.
pub type GDALTransformerFunc = ::std::option::Option<
    unsafe extern "C" fn(
        pTransformerArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int,
>;
// --- Coordinate transformers (bindgen-generated FFI) ---
// Generic transformer helpers plus the GenImgProj (1/2/3/4) and
// reprojection transformer families. Each Create* returns an opaque
// transformer argument that is paired with a matching *Transform function
// (whose signature follows GDALTransformerFunc) and a Destroy* function.
extern "C" {
    pub fn GDALDestroyTransformer(pTransformerArg: *mut libc::c_void);
}
extern "C" {
    pub fn GDALUseTransformer(
        pTransformerArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALCreateSimilarTransformer(
        psTransformerArg: *mut libc::c_void,
        dfSrcRatioX: f64,
        dfSrcRatioY: f64,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALCreateGenImgProjTransformer(
        hSrcDS: GDALDatasetH,
        pszSrcWKT: *const libc::c_char,
        hDstDS: GDALDatasetH,
        pszDstWKT: *const libc::c_char,
        bGCPUseOK: libc::c_int,
        dfGCPErrorThreshold: f64,
        nOrder: libc::c_int,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALCreateGenImgProjTransformer2(
        hSrcDS: GDALDatasetH,
        hDstDS: GDALDatasetH,
        papszOptions: *mut *mut libc::c_char,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALCreateGenImgProjTransformer3(
        pszSrcWKT: *const libc::c_char,
        padfSrcGeoTransform: *const f64,
        pszDstWKT: *const libc::c_char,
        padfDstGeoTransform: *const f64,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALCreateGenImgProjTransformer4(
        hSrcSRS: OGRSpatialReferenceH,
        padfSrcGeoTransform: *const f64,
        hDstSRS: OGRSpatialReferenceH,
        padfDstGeoTransform: *const f64,
        papszOptions: *const *const libc::c_char,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALSetGenImgProjTransformerDstGeoTransform(arg1: *mut libc::c_void, arg2: *const f64);
}
extern "C" {
    pub fn GDALDestroyGenImgProjTransformer(arg1: *mut libc::c_void);
}
extern "C" {
    pub fn GDALGenImgProjTransform(
        pTransformArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALSetTransformerDstGeoTransform(arg1: *mut libc::c_void, arg2: *const f64);
}
extern "C" {
    pub fn GDALGetTransformerDstGeoTransform(arg1: *mut libc::c_void, arg2: *mut f64);
}
extern "C" {
    pub fn GDALCreateReprojectionTransformer(
        pszSrcWKT: *const libc::c_char,
        pszDstWKT: *const libc::c_char,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALCreateReprojectionTransformerEx(
        hSrcSRS: OGRSpatialReferenceH,
        hDstSRS: OGRSpatialReferenceH,
        papszOptions: *const *const libc::c_char,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALDestroyReprojectionTransformer(arg1: *mut libc::c_void);
}
extern "C" {
    pub fn GDALReprojectionTransform(
        pTransformArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
// --- GCP / TPS / RPC / GeoLoc transformer families (bindgen-generated FFI) ---
// Same create/transform/destroy triple pattern as above, specialized for
// ground-control-point polynomials, thin-plate splines, rational polynomial
// coefficients (V1/V2 RPC structs) and geolocation arrays.
extern "C" {
    pub fn GDALCreateGCPTransformer(
        nGCPCount: libc::c_int,
        pasGCPList: *const GDAL_GCP,
        nReqOrder: libc::c_int,
        bReversed: libc::c_int,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALCreateGCPRefineTransformer(
        nGCPCount: libc::c_int,
        pasGCPList: *const GDAL_GCP,
        nReqOrder: libc::c_int,
        bReversed: libc::c_int,
        tolerance: f64,
        minimumGcps: libc::c_int,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALDestroyGCPTransformer(pTransformArg: *mut libc::c_void);
}
extern "C" {
    pub fn GDALGCPTransform(
        pTransformArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALCreateTPSTransformer(
        nGCPCount: libc::c_int,
        pasGCPList: *const GDAL_GCP,
        bReversed: libc::c_int,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALDestroyTPSTransformer(pTransformArg: *mut libc::c_void);
}
extern "C" {
    pub fn GDALTPSTransform(
        pTransformArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    // NOTE: V1 takes *mut while V2 takes *const — mirrors the upstream C
    // header signatures.
    pub fn GDALCreateRPCTransformerV1(
        psRPC: *mut GDALRPCInfoV1,
        bReversed: libc::c_int,
        dfPixErrThreshold: f64,
        papszOptions: *mut *mut libc::c_char,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALCreateRPCTransformerV2(
        psRPC: *const GDALRPCInfoV2,
        bReversed: libc::c_int,
        dfPixErrThreshold: f64,
        papszOptions: *mut *mut libc::c_char,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALDestroyRPCTransformer(pTransformArg: *mut libc::c_void);
}
extern "C" {
    pub fn GDALRPCTransform(
        pTransformArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALCreateGeoLocTransformer(
        hBaseDS: GDALDatasetH,
        papszGeolocationInfo: *mut *mut libc::c_char,
        bReversed: libc::c_int,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALDestroyGeoLocTransformer(pTransformArg: *mut libc::c_void);
}
extern "C" {
    pub fn GDALGeoLocTransform(
        pTransformArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
// --- Approximating transformer, warping and transformer serialization ---
// Bindgen-generated FFI: the approximated-transformer wrapper (wraps another
// GDALTransformerFunc with an error tolerance), simple image warping, warp
// output suggestion, XML (de)serialization of transformers, and geolocation
// transformation of raster bands.
extern "C" {
    pub fn GDALCreateApproxTransformer(
        pfnRawTransformer: GDALTransformerFunc,
        pRawTransformerArg: *mut libc::c_void,
        dfMaxError: f64,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn GDALApproxTransformerOwnsSubtransformer(
        pCBData: *mut libc::c_void,
        bOwnFlag: libc::c_int,
    );
}
extern "C" {
    pub fn GDALDestroyApproxTransformer(pApproxArg: *mut libc::c_void);
}
extern "C" {
    pub fn GDALApproxTransform(
        pTransformArg: *mut libc::c_void,
        bDstToSrc: libc::c_int,
        nPointCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        panSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALSimpleImageWarp(
        hSrcDS: GDALDatasetH,
        hDstDS: GDALDatasetH,
        nBandCount: libc::c_int,
        panBandList: *mut libc::c_int,
        pfnTransform: GDALTransformerFunc,
        pTransformArg: *mut libc::c_void,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
        papszWarpOptions: *mut *mut libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    pub fn GDALSuggestedWarpOutput(
        hSrcDS: GDALDatasetH,
        pfnTransformer: GDALTransformerFunc,
        pTransformArg: *mut libc::c_void,
        padfGeoTransformOut: *mut f64,
        pnPixels: *mut libc::c_int,
        pnLines: *mut libc::c_int,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALSuggestedWarpOutput2(
        hSrcDS: GDALDatasetH,
        pfnTransformer: GDALTransformerFunc,
        pTransformArg: *mut libc::c_void,
        padfGeoTransformOut: *mut f64,
        pnPixels: *mut libc::c_int,
        pnLines: *mut libc::c_int,
        padfExtents: *mut f64,
        nOptions: libc::c_int,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALSerializeTransformer(
        pfnFunc: GDALTransformerFunc,
        pTransformArg: *mut libc::c_void,
    ) -> *mut CPLXMLNode;
}
extern "C" {
    pub fn GDALDeserializeTransformer(
        psTree: *mut CPLXMLNode,
        ppfnFunc: *mut GDALTransformerFunc,
        ppTransformArg: *mut *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALTransformGeolocations(
        hXBand: GDALRasterBandH,
        hYBand: GDALRasterBandH,
        hZBand: GDALRasterBandH,
        pfnTransformer: GDALTransformerFunc,
        pTransformArg: *mut libc::c_void,
        pfnProgress: GDALProgressFunc,
        pProgressArg: *mut libc::c_void,
        papszOptions: *mut *mut libc::c_char,
    ) -> CPLErr::Type;
}
/// Callback invoked by the contour generator for each completed contour
/// line: receives the contour level, the point count, the X and Y
/// coordinate arrays, and the opaque user-data pointer supplied at
/// creation time (see `GDAL_CG_Create`).
pub type GDALContourWriter = ::std::option::Option<
    unsafe extern "C" fn(
        dfLevel: f64,
        nPoints: libc::c_int,
        padfX: *mut f64,
        padfY: *mut f64,
        arg1: *mut libc::c_void,
    ) -> CPLErr::Type,
>;
/// Opaque handle to a contour generator; created by `GDAL_CG_Create`,
/// fed scanlines via `GDAL_CG_FeedLine`, released with `GDAL_CG_Destroy`.
pub type GDALContourGeneratorH = *mut libc::c_void;
extern "C" {
pub fn GDAL_CG_Create(
nWidth: libc::c_int,
nHeight: libc::c_int,
bNoDataSet: libc::c_int,
dfNoDataValue: f64,
dfContourInterval: f64,
dfContourBase: f64,
pfnWriter: GDALContourWriter,
pCBData: *mut libc::c_void,
) -> GDALContourGeneratorH;
}
extern "C" {
pub fn GDAL_CG_FeedLine(hCG: GDALContourGeneratorH, padfScanline: *mut f64) -> CPLErr::Type;
}
extern "C" {
pub fn GDAL_CG_Destroy(hCG: GDALContourGeneratorH);
}
extern "C" {
pub fn OGRContourWriter(
arg1: f64,
arg2: libc::c_int,
arg3: *mut f64,
arg4: *mut f64,
pInfo: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALContourGenerate(
hBand: GDALRasterBandH,
dfContourInterval: f64,
dfContourBase: f64,
nFixedLevelCount: libc::c_int,
padfFixedLevels: *mut f64,
bUseNoData: libc::c_int,
dfNoDataValue: f64,
hLayer: *mut libc::c_void,
iIDField: libc::c_int,
iElevField: libc::c_int,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALContourGenerateEx(
hBand: GDALRasterBandH,
hLayer: *mut libc::c_void,
options: CSLConstList,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
) -> CPLErr::Type;
}
/// Mode selector passed as `eMode` to `GDALViewshedGenerate`.
/// (bindgen renders the C enum as a module of integer constants.)
pub mod GDALViewshedMode {
    pub type Type = libc::c_uint;
    pub const GVM_Diagonal: Type = 1;
    pub const GVM_Edge: Type = 2;
    pub const GVM_Max: Type = 3;
    pub const GVM_Min: Type = 4;
}
/// Output-type selector passed as `heightMode` to `GDALViewshedGenerate`.
/// (bindgen renders the C enum as a module of integer constants.)
pub mod GDALViewshedOutputType {
    pub type Type = libc::c_uint;
    pub const GVOT_NORMAL: Type = 1;
    pub const GVOT_MIN_TARGET_HEIGHT_FROM_DEM: Type = 2;
    pub const GVOT_MIN_TARGET_HEIGHT_FROM_GROUND: Type = 3;
}
extern "C" {
pub fn GDALViewshedGenerate(
hBand: GDALRasterBandH,
pszDriverName: *const libc::c_char,
pszTargetRasterName: *const libc::c_char,
papszCreationOptions: CSLConstList,
dfObserverX: f64,
dfObserverY: f64,
dfObserverHeight: f64,
dfTargetHeight: f64,
dfVisibleVal: f64,
dfInvisibleVal: f64,
dfOutOfRangeVal: f64,
dfNoDataVal: f64,
dfCurvCoeff: f64,
eMode: GDALViewshedMode::Type,
dfMaxDistance: f64,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
heightMode: GDALViewshedOutputType::Type,
papszExtraOptions: CSLConstList,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALRasterizeGeometries(
hDS: GDALDatasetH,
nBandCount: libc::c_int,
panBandList: *mut libc::c_int,
nGeomCount: libc::c_int,
pahGeometries: *mut OGRGeometryH,
pfnTransformer: GDALTransformerFunc,
pTransformArg: *mut libc::c_void,
padfGeomBurnValue: *mut f64,
papszOptions: *mut *mut libc::c_char,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRasterizeLayers(
hDS: GDALDatasetH,
nBandCount: libc::c_int,
panBandList: *mut libc::c_int,
nLayerCount: libc::c_int,
pahLayers: *mut OGRLayerH,
pfnTransformer: GDALTransformerFunc,
pTransformArg: *mut libc::c_void,
padfLayerBurnValues: *mut f64,
papszOptions: *mut *mut libc::c_char,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALRasterizeLayersBuf(
pData: *mut libc::c_void,
nBufXSize: libc::c_int,
nBufYSize: libc::c_int,
eBufType: GDALDataType::Type,
nPixelSpace: libc::c_int,
nLineSpace: libc::c_int,
nLayerCount: libc::c_int,
pahLayers: *mut OGRLayerH,
pszDstProjection: *const libc::c_char,
padfDstGeoTransform: *mut f64,
pfnTransformer: GDALTransformerFunc,
pTransformArg: *mut libc::c_void,
dfBurnValue: f64,
papszOptions: *mut *mut libc::c_char,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
) -> CPLErr::Type;
}
/// Interpolation algorithm selector for `GDALGridCreate` and
/// `GDALGridContextCreate`. (bindgen renders the C enum as a module of
/// integer constants.)
pub mod GDALGridAlgorithm {
    pub type Type = libc::c_uint;
    pub const GGA_InverseDistanceToAPower: Type = 1;
    pub const GGA_MovingAverage: Type = 2;
    pub const GGA_NearestNeighbor: Type = 3;
    pub const GGA_MetricMinimum: Type = 4;
    pub const GGA_MetricMaximum: Type = 5;
    pub const GGA_MetricRange: Type = 6;
    pub const GGA_MetricCount: Type = 7;
    pub const GGA_MetricAverageDistance: Type = 8;
    pub const GGA_MetricAverageDistancePts: Type = 9;
    pub const GGA_Linear: Type = 10;
    pub const GGA_InverseDistanceToAPowerNearestNeighbor: Type = 11;
}
extern "C" {
pub fn GDALGridCreate(
arg1: GDALGridAlgorithm::Type,
arg2: *const libc::c_void,
arg3: GUInt32,
arg4: *const f64,
arg5: *const f64,
arg6: *const f64,
arg7: f64,
arg8: f64,
arg9: f64,
arg10: f64,
arg11: GUInt32,
arg12: GUInt32,
arg13: GDALDataType::Type,
arg14: *mut libc::c_void,
arg15: GDALProgressFunc,
arg16: *mut libc::c_void,
) -> CPLErr::Type;
}
/// Opaque gridding context: created by `GDALGridContextCreate`, driven with
/// `GDALGridContextProcess`, and released with `GDALGridContextFree`.
/// The zero-sized `_unused` field keeps the type opaque on the Rust side;
/// it is only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALGridContext {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALGridContextCreate(
eAlgorithm: GDALGridAlgorithm::Type,
poOptions: *const libc::c_void,
nPoints: GUInt32,
padfX: *const f64,
padfY: *const f64,
padfZ: *const f64,
bCallerWillKeepPointArraysAlive: libc::c_int,
) -> *mut GDALGridContext;
}
extern "C" {
pub fn GDALGridContextFree(psContext: *mut GDALGridContext);
}
extern "C" {
pub fn GDALGridContextProcess(
psContext: *mut GDALGridContext,
dfXMin: f64,
dfXMax: f64,
dfYMin: f64,
dfYMax: f64,
nXSize: GUInt32,
nYSize: GUInt32,
eType: GDALDataType::Type,
pData: *mut libc::c_void,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALComputeMatchingPoints(
hFirstImage: GDALDatasetH,
hSecondImage: GDALDatasetH,
papszOptions: *mut *mut libc::c_char,
pnGCPCount: *mut libc::c_int,
) -> *mut GDAL_GCP;
}
/// One triangular facet of a triangulation (see
/// `GDALTriangulationCreateDelaunay`): the indices of its three vertices
/// and the indices of its three neighboring facets.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALTriFacet {
    // Indices of the facet's three vertices into the point arrays.
    pub anVertexIdx: [libc::c_int; 3usize],
    // Indices of the three adjacent facets.
    pub anNeighborIdx: [libc::c_int; 3usize],
}
#[test]
fn bindgen_test_layout_GDALTriFacet() {
    // Verifies that the Rust layout of `GDALTriFacet` matches the C struct
    // this binding was generated from: size, alignment, and field offsets.
    //
    // Field offsets are computed from an uninitialized value via raw-pointer
    // arithmetic. The previous form dereferenced a null pointer
    // (`&(*ptr::null::<T>()).field`), which is undefined behavior and is
    // rejected by rustc's `deref_nullptr` lint; this is the pattern emitted
    // by current bindgen versions.
    const UNINIT: ::std::mem::MaybeUninit<GDALTriFacet> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<GDALTriFacet>(),
        24usize,
        concat!("Size of: ", stringify!(GDALTriFacet))
    );
    assert_eq!(
        ::std::mem::align_of::<GDALTriFacet>(),
        4usize,
        concat!("Alignment of ", stringify!(GDALTriFacet))
    );
    assert_eq!(
        // SAFETY: `addr_of!` only computes the field's address; the
        // uninitialized memory is never read.
        unsafe { ::std::ptr::addr_of!((*ptr).anVertexIdx) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriFacet),
            "::",
            stringify!(anVertexIdx)
        )
    );
    assert_eq!(
        // SAFETY: see above — address computation only, no read.
        unsafe { ::std::ptr::addr_of!((*ptr).anNeighborIdx) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriFacet),
            "::",
            stringify!(anNeighborIdx)
        )
    );
}
/// Per-facet coefficients for barycentric-coordinate computation; filled by
/// `GDALTriangulationComputeBarycentricCoefficients` and consumed by
/// `GDALTriangulationComputeBarycentricCoordinates`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALTriBarycentricCoefficients {
    pub dfMul1X: f64,
    pub dfMul1Y: f64,
    pub dfMul2X: f64,
    pub dfMul2Y: f64,
    pub dfCstX: f64,
    pub dfCstY: f64,
}
#[test]
fn bindgen_test_layout_GDALTriBarycentricCoefficients() {
    // Verifies that the Rust layout of `GDALTriBarycentricCoefficients`
    // matches the generating C struct: size, alignment, and field offsets.
    //
    // Offsets are taken from an uninitialized value with `ptr::addr_of!`
    // instead of the original null-pointer dereference
    // (`&(*ptr::null::<T>()).field`), which is undefined behavior and
    // trips rustc's `deref_nullptr` lint. Same assertions, sound pattern
    // (the one emitted by current bindgen versions).
    const UNINIT: ::std::mem::MaybeUninit<GDALTriBarycentricCoefficients> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<GDALTriBarycentricCoefficients>(),
        48usize,
        concat!("Size of: ", stringify!(GDALTriBarycentricCoefficients))
    );
    assert_eq!(
        ::std::mem::align_of::<GDALTriBarycentricCoefficients>(),
        8usize,
        concat!("Alignment of ", stringify!(GDALTriBarycentricCoefficients))
    );
    // SAFETY (all blocks below): `addr_of!` only computes field addresses;
    // the uninitialized memory is never read.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfMul1X) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriBarycentricCoefficients),
            "::",
            stringify!(dfMul1X)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfMul1Y) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriBarycentricCoefficients),
            "::",
            stringify!(dfMul1Y)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfMul2X) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriBarycentricCoefficients),
            "::",
            stringify!(dfMul2X)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfMul2Y) as usize - ptr as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriBarycentricCoefficients),
            "::",
            stringify!(dfMul2Y)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfCstX) as usize - ptr as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriBarycentricCoefficients),
            "::",
            stringify!(dfCstX)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).dfCstY) as usize - ptr as usize },
        40usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriBarycentricCoefficients),
            "::",
            stringify!(dfCstY)
        )
    );
}
/// A triangulation as returned by `GDALTriangulationCreateDelaunay`;
/// release with `GDALTriangulationFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALTriangulation {
    // Number of entries in `pasFacets`.
    pub nFacets: libc::c_int,
    // Array of facets.
    pub pasFacets: *mut GDALTriFacet,
    // Per-facet coefficients; populated by
    // `GDALTriangulationComputeBarycentricCoefficients`.
    pub pasFacetCoefficients: *mut GDALTriBarycentricCoefficients,
}
#[test]
fn bindgen_test_layout_GDALTriangulation() {
    // Verifies that the Rust layout of `GDALTriangulation` matches the
    // generating C struct: size, alignment, and field offsets.
    //
    // Offsets are taken from an uninitialized value with `ptr::addr_of!`
    // instead of the original null-pointer dereference
    // (`&(*ptr::null::<T>()).field`), which is undefined behavior and
    // trips rustc's `deref_nullptr` lint. Same assertions, sound pattern
    // (the one emitted by current bindgen versions).
    const UNINIT: ::std::mem::MaybeUninit<GDALTriangulation> = ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<GDALTriangulation>(),
        24usize,
        concat!("Size of: ", stringify!(GDALTriangulation))
    );
    assert_eq!(
        ::std::mem::align_of::<GDALTriangulation>(),
        8usize,
        concat!("Alignment of ", stringify!(GDALTriangulation))
    );
    // SAFETY (all blocks below): `addr_of!` only computes field addresses;
    // the uninitialized memory is never read.
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).nFacets) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriangulation),
            "::",
            stringify!(nFacets)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pasFacets) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriangulation),
            "::",
            stringify!(pasFacets)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pasFacetCoefficients) as usize - ptr as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(GDALTriangulation),
            "::",
            stringify!(pasFacetCoefficients)
        )
    );
}
extern "C" {
pub fn GDALHasTriangulation() -> libc::c_int;
}
extern "C" {
pub fn GDALTriangulationCreateDelaunay(
nPoints: libc::c_int,
padfX: *const f64,
padfY: *const f64,
) -> *mut GDALTriangulation;
}
extern "C" {
pub fn GDALTriangulationComputeBarycentricCoefficients(
psDT: *mut GDALTriangulation,
padfX: *const f64,
padfY: *const f64,
) -> libc::c_int;
}
extern "C" {
pub fn GDALTriangulationComputeBarycentricCoordinates(
psDT: *const GDALTriangulation,
nFacetIdx: libc::c_int,
dfX: f64,
dfY: f64,
pdfL1: *mut f64,
pdfL2: *mut f64,
pdfL3: *mut f64,
) -> libc::c_int;
}
extern "C" {
pub fn GDALTriangulationFindFacetBruteForce(
psDT: *const GDALTriangulation,
dfX: f64,
dfY: f64,
panOutputFacetIdx: *mut libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn GDALTriangulationFindFacetDirected(
psDT: *const GDALTriangulation,
nFacetIdx: libc::c_int,
dfX: f64,
dfY: f64,
panOutputFacetIdx: *mut libc::c_int,
) -> libc::c_int;
}
extern "C" {
pub fn GDALTriangulationFree(psDT: *mut GDALTriangulation);
}
extern "C" {
pub fn GDALTriangulationTerminate();
}
extern "C" {
pub fn GDALOpenVerticalShiftGrid(
pszProj4Geoidgrids: *const libc::c_char,
pbError: *mut libc::c_int,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALApplyVerticalShiftGrid(
hSrcDataset: GDALDatasetH,
hGridDataset: GDALDatasetH,
bInverse: libc::c_int,
dfSrcUnitToMeter: f64,
dfDstUnitToMeter: f64,
papszOptions: *const *const libc::c_char,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALInfo`; built from argv-style
/// strings by `GDALInfoOptionsNew` and released with `GDALInfoOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALInfoOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by `GDALInfoOptionsNew`;
/// only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALInfoOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALInfoOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALInfoOptionsForBinary,
) -> *mut GDALInfoOptions;
}
extern "C" {
pub fn GDALInfoOptionsFree(psOptions: *mut GDALInfoOptions);
}
extern "C" {
pub fn GDALInfo(hDataset: GDALDatasetH, psOptions: *const GDALInfoOptions)
-> *mut libc::c_char;
}
/// Opaque options object consumed by `GDALTranslate`; built by
/// `GDALTranslateOptionsNew`, released with `GDALTranslateOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALTranslateOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by `GDALTranslateOptionsNew`;
/// only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALTranslateOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALTranslateOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALTranslateOptionsForBinary,
) -> *mut GDALTranslateOptions;
}
extern "C" {
pub fn GDALTranslateOptionsFree(psOptions: *mut GDALTranslateOptions);
}
extern "C" {
pub fn GDALTranslateOptionsSetProgress(
psOptions: *mut GDALTranslateOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALTranslate(
pszDestFilename: *const libc::c_char,
hSrcDataset: GDALDatasetH,
psOptions: *const GDALTranslateOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALWarp`; built by
/// `GDALWarpAppOptionsNew`, released with `GDALWarpAppOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALWarpAppOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by `GDALWarpAppOptionsNew`;
/// only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALWarpAppOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALWarpAppOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALWarpAppOptionsForBinary,
) -> *mut GDALWarpAppOptions;
}
extern "C" {
pub fn GDALWarpAppOptionsFree(psOptions: *mut GDALWarpAppOptions);
}
extern "C" {
pub fn GDALWarpAppOptionsSetProgress(
psOptions: *mut GDALWarpAppOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALWarpAppOptionsSetQuiet(psOptions: *mut GDALWarpAppOptions, bQuiet: libc::c_int);
}
extern "C" {
pub fn GDALWarpAppOptionsSetWarpOption(
psOptions: *mut GDALWarpAppOptions,
pszKey: *const libc::c_char,
pszValue: *const libc::c_char,
);
}
extern "C" {
pub fn GDALWarp(
pszDest: *const libc::c_char,
hDstDS: GDALDatasetH,
nSrcCount: libc::c_int,
pahSrcDS: *mut GDALDatasetH,
psOptions: *const GDALWarpAppOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALVectorTranslate`; built by
/// `GDALVectorTranslateOptionsNew`, released with
/// `GDALVectorTranslateOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALVectorTranslateOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by
/// `GDALVectorTranslateOptionsNew`; only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALVectorTranslateOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALVectorTranslateOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALVectorTranslateOptionsForBinary,
) -> *mut GDALVectorTranslateOptions;
}
extern "C" {
pub fn GDALVectorTranslateOptionsFree(psOptions: *mut GDALVectorTranslateOptions);
}
extern "C" {
pub fn GDALVectorTranslateOptionsSetProgress(
psOptions: *mut GDALVectorTranslateOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALVectorTranslate(
pszDest: *const libc::c_char,
hDstDS: GDALDatasetH,
nSrcCount: libc::c_int,
pahSrcDS: *mut GDALDatasetH,
psOptions: *const GDALVectorTranslateOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALDEMProcessing`; built by
/// `GDALDEMProcessingOptionsNew`, released with
/// `GDALDEMProcessingOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALDEMProcessingOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by
/// `GDALDEMProcessingOptionsNew`; only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALDEMProcessingOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALDEMProcessingOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALDEMProcessingOptionsForBinary,
) -> *mut GDALDEMProcessingOptions;
}
extern "C" {
pub fn GDALDEMProcessingOptionsFree(psOptions: *mut GDALDEMProcessingOptions);
}
extern "C" {
pub fn GDALDEMProcessingOptionsSetProgress(
psOptions: *mut GDALDEMProcessingOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALDEMProcessing(
pszDestFilename: *const libc::c_char,
hSrcDataset: GDALDatasetH,
pszProcessing: *const libc::c_char,
pszColorFilename: *const libc::c_char,
psOptions: *const GDALDEMProcessingOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALNearblack`; built by
/// `GDALNearblackOptionsNew`, released with `GDALNearblackOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALNearblackOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by `GDALNearblackOptionsNew`;
/// only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALNearblackOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALNearblackOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALNearblackOptionsForBinary,
) -> *mut GDALNearblackOptions;
}
extern "C" {
pub fn GDALNearblackOptionsFree(psOptions: *mut GDALNearblackOptions);
}
extern "C" {
pub fn GDALNearblackOptionsSetProgress(
psOptions: *mut GDALNearblackOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALNearblack(
pszDest: *const libc::c_char,
hDstDS: GDALDatasetH,
hSrcDS: GDALDatasetH,
psOptions: *const GDALNearblackOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALGrid`; built by
/// `GDALGridOptionsNew`, released with `GDALGridOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALGridOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by `GDALGridOptionsNew`;
/// only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALGridOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALGridOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALGridOptionsForBinary,
) -> *mut GDALGridOptions;
}
extern "C" {
pub fn GDALGridOptionsFree(psOptions: *mut GDALGridOptions);
}
extern "C" {
pub fn GDALGridOptionsSetProgress(
psOptions: *mut GDALGridOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALGrid(
pszDest: *const libc::c_char,
hSrcDS: GDALDatasetH,
psOptions: *const GDALGridOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALRasterize`; built by
/// `GDALRasterizeOptionsNew`, released with `GDALRasterizeOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALRasterizeOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by `GDALRasterizeOptionsNew`;
/// only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALRasterizeOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALRasterizeOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALRasterizeOptionsForBinary,
) -> *mut GDALRasterizeOptions;
}
extern "C" {
pub fn GDALRasterizeOptionsFree(psOptions: *mut GDALRasterizeOptions);
}
extern "C" {
pub fn GDALRasterizeOptionsSetProgress(
psOptions: *mut GDALRasterizeOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALRasterize(
pszDest: *const libc::c_char,
hDstDS: GDALDatasetH,
hSrcDS: GDALDatasetH,
psOptions: *const GDALRasterizeOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALBuildVRT`; built by
/// `GDALBuildVRTOptionsNew`, released with `GDALBuildVRTOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALBuildVRTOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by `GDALBuildVRTOptionsNew`;
/// only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALBuildVRTOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALBuildVRTOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALBuildVRTOptionsForBinary,
) -> *mut GDALBuildVRTOptions;
}
extern "C" {
pub fn GDALBuildVRTOptionsFree(psOptions: *mut GDALBuildVRTOptions);
}
extern "C" {
pub fn GDALBuildVRTOptionsSetProgress(
psOptions: *mut GDALBuildVRTOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALBuildVRT(
pszDest: *const libc::c_char,
nSrcCount: libc::c_int,
pahSrcDS: *mut GDALDatasetH,
papszSrcDSNames: *const *const libc::c_char,
psOptions: *const GDALBuildVRTOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Opaque options object consumed by `GDALMultiDimInfo`; built by
/// `GDALMultiDimInfoOptionsNew`, released with
/// `GDALMultiDimInfoOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALMultiDimInfoOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by
/// `GDALMultiDimInfoOptionsNew`; only ever handled behind a raw pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALMultiDimInfoOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALMultiDimInfoOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALMultiDimInfoOptionsForBinary,
) -> *mut GDALMultiDimInfoOptions;
}
extern "C" {
pub fn GDALMultiDimInfoOptionsFree(psOptions: *mut GDALMultiDimInfoOptions);
}
extern "C" {
pub fn GDALMultiDimInfo(
hDataset: GDALDatasetH,
psOptions: *const GDALMultiDimInfoOptions,
) -> *mut libc::c_char;
}
/// Opaque options object consumed by `GDALMultiDimTranslate`; built by
/// `GDALMultiDimTranslateOptionsNew`, released with
/// `GDALMultiDimTranslateOptionsFree`.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALMultiDimTranslateOptions {
    _unused: [u8; 0],
}
/// Opaque companion options struct accepted by
/// `GDALMultiDimTranslateOptionsNew`; only ever handled behind a raw
/// pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALMultiDimTranslateOptionsForBinary {
    _unused: [u8; 0],
}
extern "C" {
pub fn GDALMultiDimTranslateOptionsNew(
papszArgv: *mut *mut libc::c_char,
psOptionsForBinary: *mut GDALMultiDimTranslateOptionsForBinary,
) -> *mut GDALMultiDimTranslateOptions;
}
extern "C" {
pub fn GDALMultiDimTranslateOptionsFree(psOptions: *mut GDALMultiDimTranslateOptions);
}
extern "C" {
pub fn GDALMultiDimTranslateOptionsSetProgress(
psOptions: *mut GDALMultiDimTranslateOptions,
pfnProgress: GDALProgressFunc,
pProgressData: *mut libc::c_void,
);
}
extern "C" {
pub fn GDALMultiDimTranslate(
pszDest: *const libc::c_char,
hDstDataset: GDALDatasetH,
nSrcCount: libc::c_int,
pahSrcDS: *mut GDALDatasetH,
psOptions: *const GDALMultiDimTranslateOptions,
pbUsageError: *mut libc::c_int,
) -> GDALDatasetH;
}
/// Axis-orientation values for spatial reference systems; convertible to a
/// human-readable name via `OSRAxisEnumToName`. (bindgen renders the C
/// enum as a module of integer constants.)
pub mod OGRAxisOrientation {
    pub type Type = libc::c_uint;
    pub const OAO_Other: Type = 0;
    pub const OAO_North: Type = 1;
    pub const OAO_South: Type = 2;
    pub const OAO_East: Type = 3;
    pub const OAO_West: Type = 4;
    pub const OAO_Up: Type = 5;
    pub const OAO_Down: Type = 6;
}
extern "C" {
pub fn OSRAxisEnumToName(eOrientation: OGRAxisOrientation::Type) -> *const libc::c_char;
}
extern "C" {
pub fn OSRSetPROJSearchPaths(papszPaths: *const *const libc::c_char);
}
extern "C" {
pub fn OSRGetPROJSearchPaths() -> *mut *mut libc::c_char;
}
extern "C" {
pub fn OSRSetPROJAuxDbPaths(papszPaths: *const *const libc::c_char);
}
extern "C" {
pub fn OSRGetPROJAuxDbPaths() -> *mut *mut libc::c_char;
}
extern "C" {
pub fn OSRSetPROJEnableNetwork(enabled: libc::c_int);
}
extern "C" {
pub fn OSRGetPROJEnableNetwork() -> libc::c_int;
}
extern "C" {
pub fn OSRGetPROJVersion(
pnMajor: *mut libc::c_int,
pnMinor: *mut libc::c_int,
pnPatch: *mut libc::c_int,
);
}
extern "C" {
pub fn OSRNewSpatialReference(arg1: *const libc::c_char) -> OGRSpatialReferenceH;
}
extern "C" {
pub fn OSRCloneGeogCS(arg1: OGRSpatialReferenceH) -> OGRSpatialReferenceH;
}
extern "C" {
pub fn OSRClone(arg1: OGRSpatialReferenceH) -> OGRSpatialReferenceH;
}
extern "C" {
pub fn OSRDestroySpatialReference(arg1: OGRSpatialReferenceH);
}
extern "C" {
pub fn OSRReference(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRDereference(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRRelease(arg1: OGRSpatialReferenceH);
}
extern "C" {
pub fn OSRValidate(arg1: OGRSpatialReferenceH) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromEPSG(arg1: OGRSpatialReferenceH, arg2: libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromEPSGA(arg1: OGRSpatialReferenceH, arg2: libc::c_int) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromWkt(
arg1: OGRSpatialReferenceH,
arg2: *mut *mut libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromProj4(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromESRI(
arg1: OGRSpatialReferenceH,
arg2: *mut *mut libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromPCI(
hSRS: OGRSpatialReferenceH,
arg1: *const libc::c_char,
arg2: *const libc::c_char,
arg3: *mut f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromUSGS(
arg1: OGRSpatialReferenceH,
arg2: libc::c_long,
arg3: libc::c_long,
arg4: *mut f64,
arg5: libc::c_long,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromXML(arg1: OGRSpatialReferenceH, arg2: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromDict(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
arg3: *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromPanorama(
arg1: OGRSpatialReferenceH,
arg2: libc::c_long,
arg3: libc::c_long,
arg4: libc::c_long,
arg5: *mut f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromOzi(
arg1: OGRSpatialReferenceH,
arg2: *const *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromMICoordSys(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromERM(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
arg3: *const libc::c_char,
arg4: *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRImportFromUrl(arg1: OGRSpatialReferenceH, arg2: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToWkt(arg1: OGRSpatialReferenceH, arg2: *mut *mut libc::c_char)
-> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToWktEx(
arg1: OGRSpatialReferenceH,
ppszResult: *mut *mut libc::c_char,
papszOptions: *const *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToPrettyWkt(
arg1: OGRSpatialReferenceH,
arg2: *mut *mut libc::c_char,
arg3: libc::c_int,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToPROJJSON(
hSRS: OGRSpatialReferenceH,
ppszReturn: *mut *mut libc::c_char,
papszOptions: *const *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToProj4(
arg1: OGRSpatialReferenceH,
arg2: *mut *mut libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToPCI(
arg1: OGRSpatialReferenceH,
arg2: *mut *mut libc::c_char,
arg3: *mut *mut libc::c_char,
arg4: *mut *mut f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToUSGS(
arg1: OGRSpatialReferenceH,
arg2: *mut libc::c_long,
arg3: *mut libc::c_long,
arg4: *mut *mut f64,
arg5: *mut libc::c_long,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToXML(
arg1: OGRSpatialReferenceH,
arg2: *mut *mut libc::c_char,
arg3: *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToPanorama(
arg1: OGRSpatialReferenceH,
arg2: *mut libc::c_long,
arg3: *mut libc::c_long,
arg4: *mut libc::c_long,
arg5: *mut libc::c_long,
arg6: *mut f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToMICoordSys(
arg1: OGRSpatialReferenceH,
arg2: *mut *mut libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRExportToERM(
arg1: OGRSpatialReferenceH,
arg2: *mut libc::c_char,
arg3: *mut libc::c_char,
arg4: *mut libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRMorphToESRI(arg1: OGRSpatialReferenceH) -> OGRErr::Type;
}
extern "C" {
pub fn OSRMorphFromESRI(arg1: OGRSpatialReferenceH) -> OGRErr::Type;
}
extern "C" {
pub fn OSRConvertToOtherProjection(
hSRS: OGRSpatialReferenceH,
pszTargetProjection: *const libc::c_char,
papszOptions: *const *const libc::c_char,
) -> OGRSpatialReferenceH;
}
extern "C" {
pub fn OSRGetName(hSRS: OGRSpatialReferenceH) -> *const libc::c_char;
}
extern "C" {
pub fn OSRSetAttrValue(
hSRS: OGRSpatialReferenceH,
pszNodePath: *const libc::c_char,
pszNewNodeValue: *const libc::c_char,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRGetAttrValue(
hSRS: OGRSpatialReferenceH,
pszName: *const libc::c_char,
iChild: libc::c_int,
) -> *const libc::c_char;
}
extern "C" {
pub fn OSRSetAngularUnits(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
arg3: f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRGetAngularUnits(arg1: OGRSpatialReferenceH, arg2: *mut *mut libc::c_char) -> f64;
}
extern "C" {
pub fn OSRSetLinearUnits(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
arg3: f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRSetTargetLinearUnits(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
arg3: *const libc::c_char,
arg4: f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRSetLinearUnitsAndUpdateParameters(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
arg3: f64,
) -> OGRErr::Type;
}
extern "C" {
pub fn OSRGetLinearUnits(arg1: OGRSpatialReferenceH, arg2: *mut *mut libc::c_char) -> f64;
}
extern "C" {
pub fn OSRGetTargetLinearUnits(
arg1: OGRSpatialReferenceH,
arg2: *const libc::c_char,
arg3: *mut *mut libc::c_char,
) -> f64;
}
extern "C" {
pub fn OSRGetPrimeMeridian(arg1: OGRSpatialReferenceH, arg2: *mut *mut libc::c_char) -> f64;
}
extern "C" {
pub fn OSRIsGeographic(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsDerivedGeographic(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsLocal(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsProjected(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsCompound(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsGeocentric(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsVertical(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsDynamic(arg1: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsSameGeogCS(arg1: OGRSpatialReferenceH, arg2: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsSameVertCS(arg1: OGRSpatialReferenceH, arg2: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsSame(arg1: OGRSpatialReferenceH, arg2: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
pub fn OSRIsSameEx(
arg1: OGRSpatialReferenceH,
arg2: OGRSpatialReferenceH,
papszOptions: *const *const libc::c_char,
) -> libc::c_int;
}
extern "C" {
pub fn OSRSetCoordinateEpoch(hSRS: OGRSpatialReferenceH, dfCoordinateEpoch: f64);
}
extern "C" {
pub fn OSRGetCoordinateEpoch(hSRS: OGRSpatialReferenceH) -> f64;
}
extern "C" {
pub fn OSRSetLocalCS(hSRS: OGRSpatialReferenceH, pszName: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OSRSetProjCS(hSRS: OGRSpatialReferenceH, pszName: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OSRSetGeocCS(hSRS: OGRSpatialReferenceH, pszName: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
pub fn OSRSetWellKnownGeogCS(
hSRS: OGRSpatialReferenceH,
pszName: *const libc::c_char,
) -> OGRErr::Type;
}
// bindgen-generated FFI declarations for GDAL's spatial-reference (OSR) C API.
// NOTE(review): presumably mirrors GDAL's ogr_srs_api.h — regenerate with bindgen
// rather than editing these signatures by hand; any change alters the FFI contract.
extern "C" {
    /// Initializes the SRS from a user-supplied definition string
    /// (format interpreted by GDAL; see GDAL's OSRSetFromUserInput docs).
    pub fn OSRSetFromUserInput(
        hSRS: OGRSpatialReferenceH,
        arg1: *const libc::c_char,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Copies the geographic CS portion from `hSrcSRS` into `hSRS`.
    pub fn OSRCopyGeogCSFrom(
        hSRS: OGRSpatialReferenceH,
        hSrcSRS: OGRSpatialReferenceH,
    ) -> OGRErr::Type;
}
extern "C" {
    // Takes seven f64 values — the classic 7-parameter TOWGS84 datum shift.
    // NOTE(review): parameter order/units not verifiable from here; confirm against GDAL docs.
    pub fn OSRSetTOWGS84(
        hSRS: OGRSpatialReferenceH,
        arg1: f64,
        arg2: f64,
        arg3: f64,
        arg4: f64,
        arg5: f64,
        arg6: f64,
        arg7: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    // arg1: caller-supplied output buffer; arg2: presumably the buffer capacity — TODO confirm.
    pub fn OSRGetTOWGS84(
        hSRS: OGRSpatialReferenceH,
        arg1: *mut f64,
        arg2: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRAddGuessedTOWGS84(hSRS: OGRSpatialReferenceH) -> OGRErr::Type;
}
extern "C" {
    /// Builds a compound CRS from a horizontal and a vertical SRS.
    pub fn OSRSetCompoundCS(
        hSRS: OGRSpatialReferenceH,
        pszName: *const libc::c_char,
        hHorizSRS: OGRSpatialReferenceH,
        hVertSRS: OGRSpatialReferenceH,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRPromoteTo3D(hSRS: OGRSpatialReferenceH, pszName: *const libc::c_char)
        -> OGRErr::Type;
}
extern "C" {
    pub fn OSRDemoteTo2D(hSRS: OGRSpatialReferenceH, pszName: *const libc::c_char) -> OGRErr::Type;
}
extern "C" {
    /// Fully defines the geographic CS (datum, ellipsoid, prime meridian, angular units).
    pub fn OSRSetGeogCS(
        hSRS: OGRSpatialReferenceH,
        pszGeogName: *const libc::c_char,
        pszDatumName: *const libc::c_char,
        pszEllipsoidName: *const libc::c_char,
        dfSemiMajor: f64,
        dfInvFlattening: f64,
        pszPMName: *const libc::c_char,
        dfPMOffset: f64,
        pszUnits: *const libc::c_char,
        dfConvertToRadians: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetVertCS(
        hSRS: OGRSpatialReferenceH,
        pszVertCSName: *const libc::c_char,
        pszVertDatumName: *const libc::c_char,
        nVertDatumType: libc::c_int,
    ) -> OGRErr::Type;
}
// Ellipsoid parameter getters: the OGRErr out-pointer reports lookup failure.
extern "C" {
    pub fn OSRGetSemiMajor(arg1: OGRSpatialReferenceH, arg2: *mut OGRErr::Type) -> f64;
}
extern "C" {
    pub fn OSRGetSemiMinor(arg1: OGRSpatialReferenceH, arg2: *mut OGRErr::Type) -> f64;
}
extern "C" {
    pub fn OSRGetInvFlattening(arg1: OGRSpatialReferenceH, arg2: *mut OGRErr::Type) -> f64;
}
// Authority (e.g. EPSG) identification of a target node within the SRS tree.
extern "C" {
    pub fn OSRSetAuthority(
        hSRS: OGRSpatialReferenceH,
        pszTargetKey: *const libc::c_char,
        pszAuthority: *const libc::c_char,
        nCode: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    // Returned pointer is owned by GDAL — do not free on the Rust side.
    pub fn OSRGetAuthorityCode(
        hSRS: OGRSpatialReferenceH,
        pszTargetKey: *const libc::c_char,
    ) -> *const libc::c_char;
}
extern "C" {
    // Returned pointer is owned by GDAL — do not free on the Rust side.
    pub fn OSRGetAuthorityName(
        hSRS: OGRSpatialReferenceH,
        pszTargetKey: *const libc::c_char,
    ) -> *const libc::c_char;
}
extern "C" {
    /// Retrieves the CRS area-of-use bounding box (degrees) and area name, if known.
    pub fn OSRGetAreaOfUse(
        hSRS: OGRSpatialReferenceH,
        pdfWestLongitudeDeg: *mut f64,
        pdfSouthLatitudeDeg: *mut f64,
        pdfEastLongitudeDeg: *mut f64,
        pdfNorthLatitudeDeg: *mut f64,
        ppszAreaName: *mut *const libc::c_char,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OSRSetProjection(arg1: OGRSpatialReferenceH, arg2: *const libc::c_char) -> OGRErr::Type;
}
// Projection parameter access; the "Norm" variants presumably work in normalized
// (degree/meter) units — confirm against GDAL's OSRSetNormProjParm docs.
extern "C" {
    pub fn OSRSetProjParm(
        arg1: OGRSpatialReferenceH,
        arg2: *const libc::c_char,
        arg3: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRGetProjParm(
        hSRS: OGRSpatialReferenceH,
        pszParamName: *const libc::c_char,
        dfDefault: f64,
        arg1: *mut OGRErr::Type,
    ) -> f64;
}
extern "C" {
    pub fn OSRSetNormProjParm(
        arg1: OGRSpatialReferenceH,
        arg2: *const libc::c_char,
        arg3: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRGetNormProjParm(
        hSRS: OGRSpatialReferenceH,
        pszParamName: *const libc::c_char,
        dfDefault: f64,
        arg1: *mut OGRErr::Type,
    ) -> f64;
}
extern "C" {
    /// Sets a UTM projection; `bNorth` selects the northern hemisphere (C boolean).
    pub fn OSRSetUTM(
        hSRS: OGRSpatialReferenceH,
        nZone: libc::c_int,
        bNorth: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRGetUTMZone(hSRS: OGRSpatialReferenceH, pbNorth: *mut libc::c_int) -> libc::c_int;
}
extern "C" {
    pub fn OSRSetStatePlane(
        hSRS: OGRSpatialReferenceH,
        nZone: libc::c_int,
        bNAD83: libc::c_int,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetStatePlaneWithUnits(
        hSRS: OGRSpatialReferenceH,
        nZone: libc::c_int,
        bNAD83: libc::c_int,
        pszOverrideUnitName: *const libc::c_char,
        dfOverrideUnit: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRAutoIdentifyEPSG(hSRS: OGRSpatialReferenceH) -> OGRErr::Type;
}
extern "C" {
    // Returns a GDAL-allocated SRS array; release it with OSRFreeSRSArray below.
    pub fn OSRFindMatches(
        hSRS: OGRSpatialReferenceH,
        papszOptions: *mut *mut libc::c_char,
        pnEntries: *mut libc::c_int,
        ppanMatchConfidence: *mut *mut libc::c_int,
    ) -> *mut OGRSpatialReferenceH;
}
extern "C" {
    pub fn OSRFreeSRSArray(pahSRS: *mut OGRSpatialReferenceH);
}
extern "C" {
    pub fn OSREPSGTreatsAsLatLong(hSRS: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
    pub fn OSREPSGTreatsAsNorthingEasting(hSRS: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
    // Returned pointer is owned by GDAL — do not free on the Rust side.
    pub fn OSRGetAxis(
        hSRS: OGRSpatialReferenceH,
        pszTargetKey: *const libc::c_char,
        iAxis: libc::c_int,
        peOrientation: *mut OGRAxisOrientation::Type,
    ) -> *const libc::c_char;
}
extern "C" {
    pub fn OSRGetAxesCount(hSRS: OGRSpatialReferenceH) -> libc::c_int;
}
extern "C" {
    pub fn OSRSetAxes(
        hSRS: OGRSpatialReferenceH,
        pszTargetKey: *const libc::c_char,
        pszXAxisName: *const libc::c_char,
        eXAxisOrientation: OGRAxisOrientation::Type,
        pszYAxisName: *const libc::c_char,
        eYAxisOrientation: OGRAxisOrientation::Type,
    ) -> OGRErr::Type;
}
/// C enum `OSRAxisMappingStrategy`, rendered by bindgen as a module of constants
/// (how data axes map to CRS axes: traditional GIS lon/lat order, authority-defined
/// order, or a custom mapping).
pub mod OSRAxisMappingStrategy {
    pub type Type = libc::c_uint;
    pub const OAMS_TRADITIONAL_GIS_ORDER: Type = 0;
    pub const OAMS_AUTHORITY_COMPLIANT: Type = 1;
    pub const OAMS_CUSTOM: Type = 2;
}
// Axis-mapping strategy accessors and the explicit data-axis-to-SRS-axis mapping.
extern "C" {
    pub fn OSRGetAxisMappingStrategy(hSRS: OGRSpatialReferenceH) -> OSRAxisMappingStrategy::Type;
}
extern "C" {
    pub fn OSRSetAxisMappingStrategy(
        hSRS: OGRSpatialReferenceH,
        strategy: OSRAxisMappingStrategy::Type,
    );
}
extern "C" {
    // Returns a GDAL-owned int array of length *pnCount — do not free on the Rust side.
    pub fn OSRGetDataAxisToSRSAxisMapping(
        hSRS: OGRSpatialReferenceH,
        pnCount: *mut libc::c_int,
    ) -> *const libc::c_int;
}
extern "C" {
    pub fn OSRSetDataAxisToSRSAxisMapping(
        hSRS: OGRSpatialReferenceH,
        nMappingSize: libc::c_int,
        panMapping: *const libc::c_int,
    ) -> OGRErr::Type;
}
// OSRSet<Projection> family: each function configures `hSRS` as a specific map
// projection from its defining parameters (standard parallels, center lat/long,
// scale, false easting/northing, etc.). All take latitudes/longitudes and offsets
// as f64 and return an OGRErr status. bindgen-generated; do not edit signatures.
extern "C" {
    /// Albers Conic Equal Area.
    pub fn OSRSetACEA(
        hSRS: OGRSpatialReferenceH,
        dfStdP1: f64,
        dfStdP2: f64,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Azimuthal Equidistant.
    pub fn OSRSetAE(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetBonne(
        hSRS: OGRSpatialReferenceH,
        dfStandardParallel: f64,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Cylindrical Equal Area.
    pub fn OSRSetCEA(
        hSRS: OGRSpatialReferenceH,
        dfStdP1: f64,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetCS(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Equidistant Conic.
    pub fn OSRSetEC(
        hSRS: OGRSpatialReferenceH,
        dfStdP1: f64,
        dfStdP2: f64,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    // Generic Eckert with a variant number; IV and VI also have dedicated setters below.
    pub fn OSRSetEckert(
        hSRS: OGRSpatialReferenceH,
        nVariation: libc::c_int,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetEckertIV(
        hSRS: OGRSpatialReferenceH,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetEckertVI(
        hSRS: OGRSpatialReferenceH,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetEquirectangular(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetEquirectangular2(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfPseudoStdParallel1: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetGS(
        hSRS: OGRSpatialReferenceH,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetGH(
        hSRS: OGRSpatialReferenceH,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetIGH(hSRS: OGRSpatialReferenceH) -> OGRErr::Type;
}
extern "C" {
    /// Geostationary satellite view projection.
    pub fn OSRSetGEOS(
        hSRS: OGRSpatialReferenceH,
        dfCentralMeridian: f64,
        dfSatelliteHeight: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetGaussSchreiberTMercator(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetGnomonic(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Hotine Oblique Mercator (azimuth/rectified-grid-angle form).
    pub fn OSRSetHOM(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfAzimuth: f64,
        dfRectToSkew: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetHOMAC(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfAzimuth: f64,
        dfRectToSkew: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Hotine Oblique Mercator defined by two points on the center line.
    pub fn OSRSetHOM2PNO(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfLat1: f64,
        dfLong1: f64,
        dfLat2: f64,
        dfLong2: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetIWMPolyconic(
        hSRS: OGRSpatialReferenceH,
        dfLat1: f64,
        dfLat2: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetKrovak(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfAzimuth: f64,
        dfPseudoStdParallelLat: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Lambert Azimuthal Equal Area.
    pub fn OSRSetLAEA(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Lambert Conformal Conic (two standard parallels).
    pub fn OSRSetLCC(
        hSRS: OGRSpatialReferenceH,
        dfStdP1: f64,
        dfStdP2: f64,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Lambert Conformal Conic (single standard parallel + scale).
    pub fn OSRSetLCC1SP(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetLCCB(
        hSRS: OGRSpatialReferenceH,
        dfStdP1: f64,
        dfStdP2: f64,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetMC(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetMercator(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetMercator2SP(
        hSRS: OGRSpatialReferenceH,
        dfStdP1: f64,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetMollweide(
        hSRS: OGRSpatialReferenceH,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// New Zealand Map Grid.
    pub fn OSRSetNZMG(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetOS(
        hSRS: OGRSpatialReferenceH,
        dfOriginLat: f64,
        dfCMeridian: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetOrthographic(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetPolyconic(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Polar Stereographic.
    pub fn OSRSetPS(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetRobinson(
        hSRS: OGRSpatialReferenceH,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetSinusoidal(
        hSRS: OGRSpatialReferenceH,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetStereographic(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetSOC(
        hSRS: OGRSpatialReferenceH,
        dfLatitudeOfOrigin: f64,
        dfCentralMeridian: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Transverse Mercator.
    pub fn OSRSetTM(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetTMVariant(
        hSRS: OGRSpatialReferenceH,
        pszVariantName: *const libc::c_char,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetTMG(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetTMSO(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
        dfScale: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Two Point Equidistant, defined by two lat/long pairs.
    pub fn OSRSetTPED(
        hSRS: OGRSpatialReferenceH,
        dfLat1: f64,
        dfLong1: f64,
        dfLat2: f64,
        dfLong2: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetVDG(
        hSRS: OGRSpatialReferenceH,
        dfCenterLong: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetWagner(
        hSRS: OGRSpatialReferenceH,
        nVariation: libc::c_int,
        dfCenterLat: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Quadrilateralized Spherical Cube.
    pub fn OSRSetQSC(
        hSRS: OGRSpatialReferenceH,
        dfCenterLat: f64,
        dfCenterLong: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    /// Spherical Cross-track Height projection (peg-point parameterized).
    pub fn OSRSetSCH(
        hSRS: OGRSpatialReferenceH,
        dfPegLat: f64,
        dfPegLong: f64,
        dfPegHeading: f64,
        dfPegHgt: f64,
    ) -> OGRErr::Type;
}
extern "C" {
    pub fn OSRSetVerticalPerspective(
        hSRS: OGRSpatialReferenceH,
        dfTopoOriginLat: f64,
        dfTopoOriginLon: f64,
        dfTopoOriginHeight: f64,
        dfViewPointHeight: f64,
        dfFalseEasting: f64,
        dfFalseNorthing: f64,
    ) -> OGRErr::Type;
}
// Pure-math ellipsoid helpers (no SRS handle involved).
extern "C" {
    pub fn OSRCalcInvFlattening(dfSemiMajor: f64, dfSemiMinor: f64) -> f64;
}
extern "C" {
    pub fn OSRCalcSemiMinorFromInvFlattening(dfSemiMajor: f64, dfInvFlattening: f64) -> f64;
}
extern "C" {
    // Global OSR cleanup; see GDAL docs for when it is safe to call.
    pub fn OSRCleanup();
}
/// C enum `OSRCRSType`, rendered by bindgen as a module of constants:
/// the broad category of a CRS as reported by the CRS-info database queries below.
pub mod OSRCRSType {
    pub type Type = libc::c_uint;
    pub const OSR_CRS_TYPE_GEOGRAPHIC_2D: Type = 0;
    pub const OSR_CRS_TYPE_GEOGRAPHIC_3D: Type = 1;
    pub const OSR_CRS_TYPE_GEOCENTRIC: Type = 2;
    pub const OSR_CRS_TYPE_PROJECTED: Type = 3;
    pub const OSR_CRS_TYPE_VERTICAL: Type = 4;
    pub const OSR_CRS_TYPE_COMPOUND: Type = 5;
    pub const OSR_CRS_TYPE_OTHER: Type = 6;
}
/// C struct mirror of GDAL's `OSRCRSInfo`: one record describing a CRS from the
/// PROJ/EPSG database, as returned by `OSRGetCRSInfoListFromDatabase`.
/// `#[repr(C)]` — field order and types must match the C definition exactly
/// (the bindgen layout test below pins size 88 / align 8 on the generated target).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OSRCRSInfo {
    pub pszAuthName: *mut libc::c_char,    // authority name (e.g. "EPSG")
    pub pszCode: *mut libc::c_char,        // authority code string
    pub pszName: *mut libc::c_char,        // human-readable CRS name
    pub eType: OSRCRSType::Type,           // CRS category, see OSRCRSType
    pub bDeprecated: libc::c_int,          // C boolean
    pub bBboxValid: libc::c_int,           // C boolean: whether the four df* bounds are meaningful
    pub dfWestLongitudeDeg: f64,
    pub dfSouthLatitudeDeg: f64,
    pub dfEastLongitudeDeg: f64,
    pub dfNorthLatitudeDeg: f64,
    pub pszAreaName: *mut libc::c_char,
    pub pszProjectionMethod: *mut libc::c_char,
}
// bindgen-emitted layout check: asserts that the Rust `OSRCRSInfo` matches the
// C struct's size (88 bytes), alignment (8), and every field offset on the
// target bindgen was run for. Auto-generated — do not edit; regenerate instead.
#[test]
fn bindgen_test_layout_OSRCRSInfo() {
    assert_eq!(
        ::std::mem::size_of::<OSRCRSInfo>(),
        88usize,
        concat!("Size of: ", stringify!(OSRCRSInfo))
    );
    assert_eq!(
        ::std::mem::align_of::<OSRCRSInfo>(),
        8usize,
        concat!("Alignment of ", stringify!(OSRCRSInfo))
    );
    // Each assert below derives a field offset by taking the field's address
    // within a null base pointer (bindgen's pre-`offset_of!` idiom).
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).pszAuthName as *const _ as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(pszAuthName)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).pszCode as *const _ as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(pszCode)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).pszName as *const _ as usize },
        16usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(pszName)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).eType as *const _ as usize },
        24usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(eType)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).bDeprecated as *const _ as usize },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(bDeprecated)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).bBboxValid as *const _ as usize },
        32usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(bBboxValid)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).dfWestLongitudeDeg as *const _ as usize },
        40usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(dfWestLongitudeDeg)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).dfSouthLatitudeDeg as *const _ as usize },
        48usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(dfSouthLatitudeDeg)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).dfEastLongitudeDeg as *const _ as usize },
        56usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(dfEastLongitudeDeg)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).dfNorthLatitudeDeg as *const _ as usize },
        64usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(dfNorthLatitudeDeg)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).pszAreaName as *const _ as usize },
        72usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(pszAreaName)
        )
    );
    assert_eq!(
        unsafe { &(*(::std::ptr::null::<OSRCRSInfo>())).pszProjectionMethod as *const _ as usize },
        80usize,
        concat!(
            "Offset of field: ",
            stringify!(OSRCRSInfo),
            "::",
            stringify!(pszProjectionMethod)
        )
    );
}
/// Opaque C struct (bindgen's zero-sized placeholder): query parameters for
/// `OSRGetCRSInfoListFromDatabase`. Only ever used behind a pointer.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OSRCRSListParameters {
    _unused: [u8; 0],
}
extern "C" {
    /// Queries the CRS database; returns a GDAL-allocated array of `OSRCRSInfo*`
    /// (count written to `pnOutResultCount`). Free with `OSRDestroyCRSInfoList`.
    pub fn OSRGetCRSInfoListFromDatabase(
        pszAuthName: *const libc::c_char,
        params: *const OSRCRSListParameters,
        pnOutResultCount: *mut libc::c_int,
    ) -> *mut *mut OSRCRSInfo;
}
extern "C" {
    pub fn OSRDestroyCRSInfoList(list: *mut *mut OSRCRSInfo);
}
extern "C" {
    /// Creates a coordinate transformation between two SRSs; destroy with
    /// `OCTDestroyCoordinateTransformation`.
    pub fn OCTNewCoordinateTransformation(
        hSourceSRS: OGRSpatialReferenceH,
        hTargetSRS: OGRSpatialReferenceH,
    ) -> OGRCoordinateTransformationH;
}
/// Opaque C struct (bindgen's zero-sized placeholder) holding options for
/// `OCTNewCoordinateTransformationEx`; only ever used behind the handle alias below.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct OGRCoordinateTransformationOptions {
    _unused: [u8; 0],
}
// C handle type for the options object.
pub type OGRCoordinateTransformationOptionsH = *mut OGRCoordinateTransformationOptions;
// OCT coordinate-transformation API: option-object lifecycle, transformation
// construction, and in-place coordinate transforms. bindgen-generated.
extern "C" {
    // Allocate an options object; pair with OCTDestroyCoordinateTransformationOptions.
    pub fn OCTNewCoordinateTransformationOptions() -> OGRCoordinateTransformationOptionsH;
}
extern "C" {
    // pszCO: coordinate-operation string; bReverseCO is a C boolean.
    pub fn OCTCoordinateTransformationOptionsSetOperation(
        hOptions: OGRCoordinateTransformationOptionsH,
        pszCO: *const libc::c_char,
        bReverseCO: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OCTCoordinateTransformationOptionsSetAreaOfInterest(
        hOptions: OGRCoordinateTransformationOptionsH,
        dfWestLongitudeDeg: f64,
        dfSouthLatitudeDeg: f64,
        dfEastLongitudeDeg: f64,
        dfNorthLatitudeDeg: f64,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OCTCoordinateTransformationOptionsSetDesiredAccuracy(
        hOptions: OGRCoordinateTransformationOptionsH,
        dfAccuracy: f64,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OCTCoordinateTransformationOptionsSetBallparkAllowed(
        hOptions: OGRCoordinateTransformationOptionsH,
        bAllowBallpark: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn OCTDestroyCoordinateTransformationOptions(arg1: OGRCoordinateTransformationOptionsH);
}
extern "C" {
    /// Like `OCTNewCoordinateTransformation` but honoring an options object.
    pub fn OCTNewCoordinateTransformationEx(
        hSourceSRS: OGRSpatialReferenceH,
        hTargetSRS: OGRSpatialReferenceH,
        hOptions: OGRCoordinateTransformationOptionsH,
    ) -> OGRCoordinateTransformationH;
}
extern "C" {
    pub fn OCTClone(hTransform: OGRCoordinateTransformationH) -> OGRCoordinateTransformationH;
}
extern "C" {
    pub fn OCTGetSourceCS(hTransform: OGRCoordinateTransformationH) -> OGRSpatialReferenceH;
}
extern "C" {
    pub fn OCTGetTargetCS(hTransform: OGRCoordinateTransformationH) -> OGRSpatialReferenceH;
}
extern "C" {
    pub fn OCTGetInverse(hTransform: OGRCoordinateTransformationH) -> OGRCoordinateTransformationH;
}
extern "C" {
    pub fn OCTDestroyCoordinateTransformation(arg1: OGRCoordinateTransformationH);
}
extern "C" {
    /// Transforms `nCount` points in place; x/y/z are parallel arrays (z may be null
    /// per the C API — TODO confirm against GDAL docs).
    pub fn OCTTransform(
        hCT: OGRCoordinateTransformationH,
        nCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
    ) -> libc::c_int;
}
extern "C" {
    // As OCTTransform, with a per-point success flag array.
    pub fn OCTTransformEx(
        hCT: OGRCoordinateTransformationH,
        nCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        pabSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    // 4D variant adding a time coordinate `t`.
    pub fn OCTTransform4D(
        hCT: OGRCoordinateTransformationH,
        nCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        t: *mut f64,
        pabSuccess: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    // As OCTTransform4D, reporting per-point error codes instead of booleans.
    pub fn OCTTransform4DWithErrorCodes(
        hCT: OGRCoordinateTransformationH,
        nCount: libc::c_int,
        x: *mut f64,
        y: *mut f64,
        z: *mut f64,
        t: *mut f64,
        panErrorCodes: *mut libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    /// Transforms a bounding box, densifying edges with `densify_pts` samples to
    /// capture curvature of the transformed boundary.
    pub fn OCTTransformBounds(
        hCT: OGRCoordinateTransformationH,
        xmin: f64,
        ymin: f64,
        xmax: f64,
        ymax: f64,
        out_xmin: *mut f64,
        out_ymin: *mut f64,
        out_xmax: *mut f64,
        out_ymax: *mut f64,
        densify_pts: libc::c_int,
    ) -> libc::c_int;
}
// CPL portability-layer threading primitives: file locks, mutexes, condition
// variables, threads. Handles are opaque `*mut c_void`. bindgen-generated.
/// C callback type for a thread entry point (nullable function pointer).
pub type CPLThreadFunc = ::std::option::Option<unsafe extern "C" fn(arg1: *mut libc::c_void)>;
extern "C" {
    // Returns an opaque lock handle (null on failure per C convention — TODO confirm);
    // release with CPLUnlockFile.
    pub fn CPLLockFile(pszPath: *const libc::c_char, dfWaitInSeconds: f64) -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLUnlockFile(hLock: *mut libc::c_void);
}
extern "C" {
    pub fn CPLCreateMutex() -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLCreateMutexEx(nOptions: libc::c_int) -> *mut libc::c_void;
}
extern "C" {
    // Atomically creates the mutex at *arg1 if absent, then acquires it.
    pub fn CPLCreateOrAcquireMutex(
        arg1: *mut *mut libc::c_void,
        dfWaitInSeconds: f64,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CPLCreateOrAcquireMutexEx(
        arg1: *mut *mut libc::c_void,
        dfWaitInSeconds: f64,
        nOptions: libc::c_int,
    ) -> libc::c_int;
}
extern "C" {
    pub fn CPLAcquireMutex(hMutex: *mut libc::c_void, dfWaitInSeconds: f64) -> libc::c_int;
}
extern "C" {
    pub fn CPLReleaseMutex(hMutex: *mut libc::c_void);
}
extern "C" {
    pub fn CPLDestroyMutex(hMutex: *mut libc::c_void);
}
extern "C" {
    pub fn CPLCleanupMasterMutex();
}
extern "C" {
    pub fn CPLCreateCond() -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLCondWait(hCond: *mut libc::c_void, hMutex: *mut libc::c_void);
}
/// C enum: why `CPLCondTimedWait` returned (condition signaled, timeout, or other).
pub mod CPLCondTimedWaitReason {
    pub type Type = libc::c_uint;
    pub const COND_TIMED_WAIT_COND: Type = 0;
    pub const COND_TIMED_WAIT_TIME_OUT: Type = 1;
    pub const COND_TIMED_WAIT_OTHER: Type = 2;
}
extern "C" {
    pub fn CPLCondTimedWait(
        hCond: *mut libc::c_void,
        hMutex: *mut libc::c_void,
        dfWaitInSeconds: f64,
    ) -> CPLCondTimedWaitReason::Type;
}
extern "C" {
    pub fn CPLCondSignal(hCond: *mut libc::c_void);
}
extern "C" {
    pub fn CPLCondBroadcast(hCond: *mut libc::c_void);
}
extern "C" {
    pub fn CPLDestroyCond(hCond: *mut libc::c_void);
}
extern "C" {
    pub fn CPLGetPID() -> GIntBig;
}
extern "C" {
    pub fn CPLGetCurrentProcessID() -> libc::c_int;
}
extern "C" {
    // Fire-and-forget thread creation; use CPLCreateJoinableThread to join later.
    pub fn CPLCreateThread(pfnMain: CPLThreadFunc, pArg: *mut libc::c_void) -> libc::c_int;
}
extern "C" {
    pub fn CPLCreateJoinableThread(
        pfnMain: CPLThreadFunc,
        pArg: *mut libc::c_void,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLJoinThread(hJoinableThread: *mut libc::c_void);
}
extern "C" {
    pub fn CPLSleep(dfWaitInSeconds: f64);
}
extern "C" {
    pub fn CPLGetThreadingModel() -> *const libc::c_char;
}
extern "C" {
    pub fn CPLGetNumCPUs() -> libc::c_int;
}
/// Opaque C struct (bindgen's zero-sized placeholder) behind the `CPLLock` handle.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct _CPLLock {
    _unused: [u8; 0],
}
pub type CPLLock = _CPLLock;
/// C enum: the kind of lock `CPLCreateLock` should create.
pub mod CPLLockType {
    pub type Type = libc::c_uint;
    pub const LOCK_RECURSIVE_MUTEX: Type = 0;
    pub const LOCK_ADAPTIVE_MUTEX: Type = 1;
    pub const LOCK_SPIN: Type = 2;
}
// CPLLock lifecycle plus CPL thread-local-storage (TLS) slot accessors.
extern "C" {
    pub fn CPLCreateLock(eType: CPLLockType::Type) -> *mut CPLLock;
}
extern "C" {
    pub fn CPLCreateOrAcquireLock(arg1: *mut *mut CPLLock, eType: CPLLockType::Type)
        -> libc::c_int;
}
extern "C" {
    pub fn CPLAcquireLock(arg1: *mut CPLLock) -> libc::c_int;
}
extern "C" {
    pub fn CPLReleaseLock(arg1: *mut CPLLock);
}
extern "C" {
    pub fn CPLDestroyLock(arg1: *mut CPLLock);
}
extern "C" {
    pub fn CPLLockSetDebugPerf(arg1: *mut CPLLock, bEnableIn: libc::c_int);
}
extern "C" {
    // TLS slots are addressed by integer index; values are opaque pointers.
    pub fn CPLGetTLS(nIndex: libc::c_int) -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLGetTLSEx(
        nIndex: libc::c_int,
        pbMemoryErrorOccurred: *mut libc::c_int,
    ) -> *mut libc::c_void;
}
extern "C" {
    pub fn CPLSetTLS(nIndex: libc::c_int, pData: *mut libc::c_void, bFreeOnExit: libc::c_int);
}
/// C callback type used to free a TLS value on thread exit (nullable).
pub type CPLTLSFreeFunc = ::std::option::Option<unsafe extern "C" fn(pData: *mut libc::c_void)>;
extern "C" {
    pub fn CPLSetTLSWithFreeFunc(
        nIndex: libc::c_int,
        pData: *mut libc::c_void,
        pfnFree: CPLTLSFreeFunc,
    );
}
extern "C" {
    pub fn CPLSetTLSWithFreeFuncEx(
        nIndex: libc::c_int,
        pData: *mut libc::c_void,
        pfnFree: CPLTLSFreeFunc,
        pbMemoryErrorOccurred: *mut libc::c_int,
    );
}
extern "C" {
    pub fn CPLCleanupTLS();
}
/// C enum `GDALResampleAlg`: resampling kernels for the warp API.
/// NOTE(review): value 7 is absent here — presumably reserved/skipped in the C
/// enum; confirm against gdalwarper.h before relying on contiguity.
pub mod GDALResampleAlg {
    pub type Type = libc::c_uint;
    pub const GRA_NearestNeighbour: Type = 0;
    pub const GRA_Bilinear: Type = 1;
    pub const GRA_Cubic: Type = 2;
    pub const GRA_CubicSpline: Type = 3;
    pub const GRA_Lanczos: Type = 4;
    pub const GRA_Average: Type = 5;
    pub const GRA_Mode: Type = 6;
    pub const GRA_Max: Type = 8;
    pub const GRA_Min: Type = 9;
    pub const GRA_Med: Type = 10;
    pub const GRA_Q1: Type = 11;
    pub const GRA_Q3: Type = 12;
    pub const GRA_Sum: Type = 13;
    pub const GRA_RMS: Type = 14;
    // Alias for the highest defined value (same as GRA_RMS).
    pub const GRA_LAST_VALUE: Type = 14;
}
/// C callback type for warp mask generators (nullable function pointer):
/// given a chunk window (nXOff/nYOff/nXSize/nYSize) and per-band image data,
/// fills/updates the mask buffer `pMask` and returns a C int status.
pub type GDALMaskFunc = ::std::option::Option<
    unsafe extern "C" fn(
        pMaskFuncArg: *mut libc::c_void,
        nBandCount: libc::c_int,
        eType: GDALDataType::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        papabyImageData: *mut *mut GByte,
        bMaskIsFloat: libc::c_int,
        pMask: *mut libc::c_void,
    ) -> libc::c_int,
>;
// Built-in warp mask generators shipped with GDAL; their signatures follow the
// GDALMaskFunc shape (with extra out-params on some) and they are installed via
// the function-pointer fields of GDALWarpOptions below.
extern "C" {
    pub fn GDALWarpNoDataMasker(
        pMaskFuncArg: *mut libc::c_void,
        nBandCount: libc::c_int,
        eType: GDALDataType::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        papabyImageData: *mut *mut GByte,
        bMaskIsFloat: libc::c_int,
        pValidityMask: *mut libc::c_void,
        pbOutAllValid: *mut libc::c_int,    // out: C boolean, whole chunk valid
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALWarpDstAlphaMasker(
        pMaskFuncArg: *mut libc::c_void,
        nBandCount: libc::c_int,
        eType: GDALDataType::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        arg1: *mut *mut GByte,
        bMaskIsFloat: libc::c_int,
        pValidityMask: *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALWarpSrcAlphaMasker(
        pMaskFuncArg: *mut libc::c_void,
        nBandCount: libc::c_int,
        eType: GDALDataType::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        arg1: *mut *mut GByte,
        bMaskIsFloat: libc::c_int,
        pValidityMask: *mut libc::c_void,
        pbOutAllOpaque: *mut libc::c_int,   // out: C boolean, whole chunk opaque
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALWarpSrcMaskMasker(
        pMaskFuncArg: *mut libc::c_void,
        nBandCount: libc::c_int,
        eType: GDALDataType::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        arg1: *mut *mut GByte,
        bMaskIsFloat: libc::c_int,
        pValidityMask: *mut libc::c_void,
    ) -> CPLErr::Type;
}
extern "C" {
    pub fn GDALWarpCutlineMasker(
        pMaskFuncArg: *mut libc::c_void,
        nBandCount: libc::c_int,
        eType: GDALDataType::Type,
        nXOff: libc::c_int,
        nYOff: libc::c_int,
        nXSize: libc::c_int,
        nYSize: libc::c_int,
        arg1: *mut *mut GByte,
        bMaskIsFloat: libc::c_int,
        pValidityMask: *mut libc::c_void,
    ) -> CPLErr::Type;
}
/// C struct mirror of GDAL's `GDALWarpOptions`: the full configuration for a warp
/// operation (datasets, bands, nodata, progress/transformer callbacks, and the
/// mask-generator hooks declared above). `#[repr(C)]` — field order/types must
/// match the C definition exactly; the (truncated) bindgen layout test below this
/// struct pins the offsets. Auto-generated; regenerate rather than hand-edit.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct GDALWarpOptions {
    pub papszWarpOptions: *mut *mut libc::c_char,   // NULL-terminated NAME=VALUE list (CSL)
    pub dfWarpMemoryLimit: f64,
    pub eResampleAlg: GDALResampleAlg::Type,
    pub eWorkingDataType: GDALDataType::Type,
    pub hSrcDS: GDALDatasetH,
    pub hDstDS: GDALDatasetH,
    // Band mapping: panSrcBands[i] warps into panDstBands[i], nBandCount entries each.
    pub nBandCount: libc::c_int,
    pub panSrcBands: *mut libc::c_int,
    pub panDstBands: *mut libc::c_int,
    pub nSrcAlphaBand: libc::c_int,
    pub nDstAlphaBand: libc::c_int,
    // Per-band nodata values, split into real/imaginary components.
    pub padfSrcNoDataReal: *mut f64,
    pub padfSrcNoDataImag: *mut f64,
    pub padfDstNoDataReal: *mut f64,
    pub padfDstNoDataImag: *mut f64,
    pub pfnProgress: GDALProgressFunc,
    pub pProgressArg: *mut libc::c_void,
    pub pfnTransformer: GDALTransformerFunc,
    pub pTransformerArg: *mut libc::c_void,
    // Mask-generator hooks; each pfn* callback receives its paired p*Arg pointer.
    pub papfnSrcPerBandValidityMaskFunc: *mut GDALMaskFunc,
    pub papSrcPerBandValidityMaskFuncArg: *mut *mut libc::c_void,
    pub pfnSrcValidityMaskFunc: GDALMaskFunc,
    pub pSrcValidityMaskFuncArg: *mut libc::c_void,
    pub pfnSrcDensityMaskFunc: GDALMaskFunc,
    pub pSrcDensityMaskFuncArg: *mut libc::c_void,
    pub pfnDstDensityMaskFunc: GDALMaskFunc,
    pub pDstDensityMaskFuncArg: *mut libc::c_void,
    pub pfnDstValidityMaskFunc: GDALMaskFunc,
    pub pDstValidityMaskFuncArg: *mut libc::c_void,
    // Optional pre/post chunk-processing hooks (nullable function pointers).
    pub pfnPreWarpChunkProcessor: ::std::option::Option<
        unsafe extern "C" fn(pKern: *mut libc::c_void, pArg: *mut libc::c_void) -> CPLErr::Type,
    >,
    pub pPreWarpProcessorArg: *mut libc::c_void,
    pub pfnPostWarpChunkProcessor: ::std::option::Option<
        unsafe extern "C" fn(pKern: *mut libc::c_void, pArg: *mut libc::c_void) -> CPLErr::Type,
    >,
    pub pPostWarpProcessorArg: *mut libc::c_void,
    // Cutline geometry handle (opaque) used by GDALWarpCutlineMasker, plus blend distance.
    pub hCutline: *mut libc::c_void,
    pub dfCutlineBlendDist: f64,
}
#[test]
fn bindgen_test_layout_GDALWarpOptions() {
assert_eq!(
::std::mem::size_of::<GDALWarpOptions>(),
264usize,
concat!("Size of: ", stringify!(GDALWarpOptions))
);
assert_eq!(
::std::mem::align_of::<GDALWarpOptions>(),
8usize,
concat!("Alignment of ", stringify!(GDALWarpOptions))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).papszWarpOptions as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(papszWarpOptions)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).dfWarpMemoryLimit as *const _ as usize
},
8usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(dfWarpMemoryLimit)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).eResampleAlg as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(eResampleAlg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).eWorkingDataType as *const _ as usize
},
20usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(eWorkingDataType)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).hSrcDS as *const _ as usize },
24usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(hSrcDS)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).hDstDS as *const _ as usize },
32usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(hDstDS)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).nBandCount as *const _ as usize },
40usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(nBandCount)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).panSrcBands as *const _ as usize },
48usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(panSrcBands)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).panDstBands as *const _ as usize },
56usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(panDstBands)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).nSrcAlphaBand as *const _ as usize },
64usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(nSrcAlphaBand)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).nDstAlphaBand as *const _ as usize },
68usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(nDstAlphaBand)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).padfSrcNoDataReal as *const _ as usize
},
72usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(padfSrcNoDataReal)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).padfSrcNoDataImag as *const _ as usize
},
80usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(padfSrcNoDataImag)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).padfDstNoDataReal as *const _ as usize
},
88usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(padfDstNoDataReal)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).padfDstNoDataImag as *const _ as usize
},
96usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(padfDstNoDataImag)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).pfnProgress as *const _ as usize },
104usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnProgress)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).pProgressArg as *const _ as usize },
112usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pProgressArg)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).pfnTransformer as *const _ as usize },
120usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnTransformer)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).pTransformerArg as *const _ as usize },
128usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pTransformerArg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).papfnSrcPerBandValidityMaskFunc as *const _
as usize
},
136usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(papfnSrcPerBandValidityMaskFunc)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).papSrcPerBandValidityMaskFuncArg as *const _
as usize
},
144usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(papSrcPerBandValidityMaskFuncArg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pfnSrcValidityMaskFunc as *const _ as usize
},
152usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnSrcValidityMaskFunc)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pSrcValidityMaskFuncArg as *const _ as usize
},
160usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pSrcValidityMaskFuncArg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pfnSrcDensityMaskFunc as *const _ as usize
},
168usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnSrcDensityMaskFunc)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pSrcDensityMaskFuncArg as *const _ as usize
},
176usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pSrcDensityMaskFuncArg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pfnDstDensityMaskFunc as *const _ as usize
},
184usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnDstDensityMaskFunc)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pDstDensityMaskFuncArg as *const _ as usize
},
192usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pDstDensityMaskFuncArg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pfnDstValidityMaskFunc as *const _ as usize
},
200usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnDstValidityMaskFunc)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pDstValidityMaskFuncArg as *const _ as usize
},
208usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pDstValidityMaskFuncArg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pfnPreWarpChunkProcessor as *const _
as usize
},
216usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnPreWarpChunkProcessor)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pPreWarpProcessorArg as *const _ as usize
},
224usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pPreWarpProcessorArg)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pfnPostWarpChunkProcessor as *const _
as usize
},
232usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pfnPostWarpChunkProcessor)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).pPostWarpProcessorArg as *const _ as usize
},
240usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(pPostWarpProcessorArg)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<GDALWarpOptions>())).hCutline as *const _ as usize },
248usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(hCutline)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<GDALWarpOptions>())).dfCutlineBlendDist as *const _ as usize
},
256usize,
concat!(
"Offset of field: ",
stringify!(GDALWarpOptions),
"::",
stringify!(dfCutlineBlendDist)
)
);
}
extern "C" {
pub fn GDALCreateWarpOptions() -> *mut GDALWarpOptions;
}
extern "C" {
pub fn GDALDestroyWarpOptions(arg1: *mut GDALWarpOptions);
}
extern "C" {
pub fn GDALCloneWarpOptions(arg1: *const GDALWarpOptions) -> *mut GDALWarpOptions;
}
extern "C" {
pub fn GDALWarpInitDstNoDataReal(arg1: *mut GDALWarpOptions, dNoDataReal: f64);
}
extern "C" {
pub fn GDALWarpInitSrcNoDataReal(arg1: *mut GDALWarpOptions, dNoDataReal: f64);
}
extern "C" {
pub fn GDALWarpInitNoDataReal(arg1: *mut GDALWarpOptions, dNoDataReal: f64);
}
extern "C" {
pub fn GDALWarpInitDstNoDataImag(arg1: *mut GDALWarpOptions, dNoDataImag: f64);
}
extern "C" {
pub fn GDALWarpInitSrcNoDataImag(arg1: *mut GDALWarpOptions, dNoDataImag: f64);
}
extern "C" {
pub fn GDALWarpResolveWorkingDataType(arg1: *mut GDALWarpOptions);
}
extern "C" {
pub fn GDALWarpInitDefaultBandMapping(arg1: *mut GDALWarpOptions, nBandCount: libc::c_int);
}
extern "C" {
pub fn GDALSerializeWarpOptions(arg1: *const GDALWarpOptions) -> *mut CPLXMLNode;
}
extern "C" {
pub fn GDALDeserializeWarpOptions(arg1: *mut CPLXMLNode) -> *mut GDALWarpOptions;
}
extern "C" {
pub fn GDALReprojectImage(
hSrcDS: GDALDatasetH,
pszSrcWKT: *const libc::c_char,
hDstDS: GDALDatasetH,
pszDstWKT: *const libc::c_char,
eResampleAlg: GDALResampleAlg::Type,
dfWarpMemoryLimit: f64,
dfMaxError: f64,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
psOptions: *mut GDALWarpOptions,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALCreateAndReprojectImage(
hSrcDS: GDALDatasetH,
pszSrcWKT: *const libc::c_char,
pszDstFilename: *const libc::c_char,
pszDstWKT: *const libc::c_char,
hDstDriver: GDALDriverH,
papszCreateOptions: *mut *mut libc::c_char,
eResampleAlg: GDALResampleAlg::Type,
dfWarpMemoryLimit: f64,
dfMaxError: f64,
pfnProgress: GDALProgressFunc,
pProgressArg: *mut libc::c_void,
psOptions: *mut GDALWarpOptions,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALAutoCreateWarpedVRT(
hSrcDS: GDALDatasetH,
pszSrcWKT: *const libc::c_char,
pszDstWKT: *const libc::c_char,
eResampleAlg: GDALResampleAlg::Type,
dfMaxError: f64,
psOptions: *const GDALWarpOptions,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALAutoCreateWarpedVRTEx(
hSrcDS: GDALDatasetH,
pszSrcWKT: *const libc::c_char,
pszDstWKT: *const libc::c_char,
eResampleAlg: GDALResampleAlg::Type,
dfMaxError: f64,
psOptions: *const GDALWarpOptions,
papszTransformerOptions: CSLConstList,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALCreateWarpedVRT(
hSrcDS: GDALDatasetH,
nPixels: libc::c_int,
nLines: libc::c_int,
padfGeoTransform: *mut f64,
psOptions: *mut GDALWarpOptions,
) -> GDALDatasetH;
}
extern "C" {
pub fn GDALInitializeWarpedVRT(hDS: GDALDatasetH, psWO: *mut GDALWarpOptions) -> CPLErr::Type;
}
pub type GDALWarpOperationH = *mut libc::c_void;
extern "C" {
pub fn GDALCreateWarpOperation(arg1: *const GDALWarpOptions) -> GDALWarpOperationH;
}
extern "C" {
pub fn GDALDestroyWarpOperation(arg1: GDALWarpOperationH);
}
extern "C" {
pub fn GDALChunkAndWarpImage(
arg1: GDALWarpOperationH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
arg5: libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALChunkAndWarpMulti(
arg1: GDALWarpOperationH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
arg5: libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALWarpRegion(
arg1: GDALWarpOperationH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
arg5: libc::c_int,
arg6: libc::c_int,
arg7: libc::c_int,
arg8: libc::c_int,
arg9: libc::c_int,
) -> CPLErr::Type;
}
extern "C" {
pub fn GDALWarpRegionToBuffer(
arg1: GDALWarpOperationH,
arg2: libc::c_int,
arg3: libc::c_int,
arg4: libc::c_int,
arg5: libc::c_int,
arg6: *mut libc::c_void,
arg7: GDALDataType::Type,
arg8: libc::c_int,
arg9: libc::c_int,
arg10: libc::c_int,
arg11: libc::c_int,
) -> CPLErr::Type;
}
pub mod OGRErr {
#[doc = " Type for a OGR error"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRErr\"></div>"]
pub type Type = libc::c_uint;
#[doc = " Success"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_NONE\"></div>"]
pub const OGRERR_NONE: Type = 0;
#[doc = " Not enough data to deserialize"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_NOT_ENOUGH_DATA\"></div>"]
pub const OGRERR_NOT_ENOUGH_DATA: Type = 1;
#[doc = " Not enough memory"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_NOT_ENOUGH_MEMORY\"></div>"]
pub const OGRERR_NOT_ENOUGH_MEMORY: Type = 2;
#[doc = " Unsupported geometry type"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_UNSUPPORTED_GEOMETRY_TYPE\"></div>"]
pub const OGRERR_UNSUPPORTED_GEOMETRY_TYPE: Type = 3;
#[doc = " Unsupported operation"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_UNSUPPORTED_OPERATION\"></div>"]
pub const OGRERR_UNSUPPORTED_OPERATION: Type = 4;
#[doc = " Corrupt data"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_CORRUPT_DATA\"></div>"]
pub const OGRERR_CORRUPT_DATA: Type = 5;
#[doc = " Failure"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_FAILURE\"></div>"]
pub const OGRERR_FAILURE: Type = 6;
#[doc = " Unsupported SRS"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"OGRERR_UNSUPPORTED_SRS\"></div>"]
pub const OGRERR_UNSUPPORTED_SRS: Type = 7;
#[doc = " Invalid handle"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"INVALID_HANDLE\"></div>"]
pub const INVALID_HANDLE: Type = 8;
#[doc = " Non existing feature. Added in GDAL 2.0"]
#[doc = ""]
#[doc = " <div rustbindgen replaces=\"NON_EXISTING_FEATURE\"></div>"]
pub const NON_EXISTING_FEATURE: Type = 9;
}
pub type __builtin_va_list = [__va_list_tag; 1usize];
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct __va_list_tag {
pub gp_offset: libc::c_uint,
pub fp_offset: libc::c_uint,
pub overflow_arg_area: *mut libc::c_void,
pub reg_save_area: *mut libc::c_void,
}
#[test]
fn bindgen_test_layout___va_list_tag() {
assert_eq!(
::std::mem::size_of::<__va_list_tag>(),
24usize,
concat!("Size of: ", stringify!(__va_list_tag))
);
assert_eq!(
::std::mem::align_of::<__va_list_tag>(),
8usize,
concat!("Alignment of ", stringify!(__va_list_tag))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__va_list_tag>())).gp_offset as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(__va_list_tag),
"::",
stringify!(gp_offset)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__va_list_tag>())).fp_offset as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(__va_list_tag),
"::",
stringify!(fp_offset)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__va_list_tag>())).overflow_arg_area as *const _ as usize },
8usize,
concat!(
"Offset of field: ",
stringify!(__va_list_tag),
"::",
stringify!(overflow_arg_area)
)
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<__va_list_tag>())).reg_save_area as *const _ as usize },
16usize,
concat!(
"Offset of field: ",
stringify!(__va_list_tag),
"::",
stringify!(reg_save_area)
)
);
}
| 27.906545 | 100 | 0.594029 |
b9433849853a87a5e0d6686cb902e6d67e930858 | 268 | macro_rules! some_macro {
($other: expr) => ({
$other(None)
//~^ this function takes 0 parameters but 1 parameter was supplied
})
}
fn some_function() {}
fn main() {
some_macro!(some_function);
//~^ in this expansion of some_macro!
}
| 19.142857 | 74 | 0.604478 |
79cb7fb0b7692b3fabc63158834a8cee39e5280a | 31,056 | //! Inlining pass for MIR functions
use rustc::hir::CodegenFnAttrFlags;
use rustc::hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc::mir::*;
use rustc::mir::visit::*;
use rustc::ty::{self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc::ty::subst::{Subst, SubstsRef};
use std::collections::VecDeque;
use std::iter;
use crate::transform::{MirPass, MirSource};
use super::simplify::{remove_dead_blocks, CfgSimplifier};
use syntax::attr;
use rustc_target::spec::abi::Abi;
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;
const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;
const UNKNOWN_SIZE_COST: usize = 10;
pub struct Inline;
#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
callee: DefId,
substs: SubstsRef<'tcx>,
bb: BasicBlock,
location: SourceInfo,
}
impl<'tcx> MirPass<'tcx> for Inline {
fn run_pass(
&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut BodyCache<'tcx>
) {
if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
Inliner { tcx, source }.run_pass(body);
}
}
}
struct Inliner<'tcx> {
tcx: TyCtxt<'tcx>,
source: MirSource<'tcx>,
}
impl Inliner<'tcx> {
fn run_pass(&self, caller_body: &mut BodyCache<'tcx>) {
// Keep a queue of callsites to try inlining on. We take
// advantage of the fact that queries detect cycles here to
// allow us to try and fetch the fully optimized MIR of a
// call; if it succeeds, we can inline it and we know that
// they do not call us. Otherwise, we just don't try to
// inline.
//
// We use a queue so that we inline "broadly" before we inline
// in depth. It is unclear if this is the best heuristic,
// really, but that's true of all the heuristics in this
// file. =)
let mut callsites = VecDeque::new();
let param_env = self.tcx.param_env(self.source.def_id());
// Only do inlining into fn bodies.
let id = self.tcx.hir().as_local_hir_id(self.source.def_id()).unwrap();
if self.tcx.hir().body_owner_kind(id).is_fn_or_closure()
&& self.source.promoted.is_none()
{
for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated() {
if let Some(callsite) = self.get_valid_function_call(bb,
bb_data,
caller_body,
param_env) {
callsites.push_back(callsite);
}
}
} else {
return;
}
let mut local_change;
let mut changed = false;
loop {
local_change = false;
while let Some(callsite) = callsites.pop_front() {
debug!("checking whether to inline callsite {:?}", callsite);
if !self.tcx.is_mir_available(callsite.callee) {
debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
continue;
}
let self_node_id = self.tcx.hir().as_local_node_id(self.source.def_id()).unwrap();
let callee_node_id = self.tcx.hir().as_local_node_id(callsite.callee);
let callee_body = if let Some(callee_node_id) = callee_node_id {
// Avoid a cycle here by only using `optimized_mir` only if we have
// a lower node id than the callee. This ensures that the callee will
// not inline us. This trick only works without incremental compilation.
// So don't do it if that is enabled.
if !self.tcx.dep_graph.is_fully_enabled()
&& self_node_id.as_u32() < callee_node_id.as_u32() {
self.tcx.optimized_mir(callsite.callee)
} else {
continue;
}
} else {
// This cannot result in a cycle since the callee MIR is from another crate
// and is already optimized.
self.tcx.optimized_mir(callsite.callee)
};
let callee_body = if self.consider_optimizing(callsite, callee_body) {
self.tcx.subst_and_normalize_erasing_regions(
&callsite.substs,
param_env,
callee_body,
)
} else {
continue;
};
let start = caller_body.basic_blocks().len();
debug!("attempting to inline callsite {:?} - body={:?}", callsite, callee_body);
if !self.inline_call(callsite, caller_body, callee_body) {
debug!("attempting to inline callsite {:?} - failure", callsite);
continue;
}
debug!("attempting to inline callsite {:?} - success", callsite);
// Add callsites from inlined function
for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated().skip(start)
{
if let Some(new_callsite) = self.get_valid_function_call(bb,
bb_data,
caller_body,
param_env) {
// Don't inline the same function multiple times.
if callsite.callee != new_callsite.callee {
callsites.push_back(new_callsite);
}
}
}
local_change = true;
changed = true;
}
if !local_change {
break;
}
}
// Simplify if we inlined anything.
if changed {
debug!("running simplify cfg on {:?}", self.source);
CfgSimplifier::new(caller_body).simplify();
remove_dead_blocks(caller_body);
}
}
fn get_valid_function_call(&self,
bb: BasicBlock,
bb_data: &BasicBlockData<'tcx>,
caller_body: &Body<'tcx>,
param_env: ParamEnv<'tcx>,
) -> Option<CallSite<'tcx>> {
// Don't inline calls that are in cleanup blocks.
if bb_data.is_cleanup { return None; }
// Only consider direct calls to functions
let terminator = bb_data.terminator();
if let TerminatorKind::Call { func: ref op, .. } = terminator.kind {
if let ty::FnDef(callee_def_id, substs) = op.ty(caller_body, self.tcx).kind {
let instance = Instance::resolve(self.tcx,
param_env,
callee_def_id,
substs)?;
if let InstanceDef::Virtual(..) = instance.def {
return None;
}
return Some(CallSite {
callee: instance.def_id(),
substs: instance.substs,
bb,
location: terminator.source_info
});
}
}
None
}
fn consider_optimizing(&self,
callsite: CallSite<'tcx>,
callee_body: &Body<'tcx>)
-> bool
{
debug!("consider_optimizing({:?})", callsite);
self.should_inline(callsite, callee_body)
&& self.tcx.consider_optimizing(|| format!("Inline {:?} into {:?}",
callee_body.span,
callsite))
}
fn should_inline(&self,
callsite: CallSite<'tcx>,
callee_body: &Body<'tcx>)
-> bool
{
debug!("should_inline({:?})", callsite);
let tcx = self.tcx;
// Cannot inline generators which haven't been transformed yet
if callee_body.yield_ty.is_some() {
debug!(" yield ty present - not inlining");
return false;
}
let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee);
let hinted = match codegen_fn_attrs.inline {
// Just treat inline(always) as a hint for now,
// there are cases that prevent inlining that we
// need to check for first.
attr::InlineAttr::Always => true,
attr::InlineAttr::Never => {
debug!("`#[inline(never)]` present - not inlining");
return false
}
attr::InlineAttr::Hint => true,
attr::InlineAttr::None => false,
};
// Only inline local functions if they would be eligible for cross-crate
// inlining. This is to ensure that the final crate doesn't have MIR that
// reference unexported symbols
if callsite.callee.is_local() {
if callsite.substs.non_erasable_generics().count() == 0 && !hinted {
debug!(" callee is an exported function - not inlining");
return false;
}
}
let mut threshold = if hinted {
HINT_THRESHOLD
} else {
DEFAULT_THRESHOLD
};
// Significantly lower the threshold for inlining cold functions
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
threshold /= 5;
}
// Give a bonus functions with a small number of blocks,
// We normally have two or three blocks for even
// very small functions.
if callee_body.basic_blocks().len() <= 3 {
threshold += threshold / 4;
}
debug!(" final inline threshold = {}", threshold);
// FIXME: Give a bonus to functions with only a single caller
let param_env = tcx.param_env(self.source.def_id());
let mut first_block = true;
let mut cost = 0;
// Traverse the MIR manually so we can account for the effects of
// inlining on the CFG.
let mut work_list = vec![START_BLOCK];
let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
while let Some(bb) = work_list.pop() {
if !visited.insert(bb.index()) { continue; }
let blk = &callee_body.basic_blocks()[bb];
for stmt in &blk.statements {
// Don't count StorageLive/StorageDead in the inlining cost.
match stmt.kind {
StatementKind::StorageLive(_) |
StatementKind::StorageDead(_) |
StatementKind::Nop => {}
_ => cost += INSTR_COST
}
}
let term = blk.terminator();
let mut is_drop = false;
match term.kind {
TerminatorKind::Drop { ref location, target, unwind } |
TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
is_drop = true;
work_list.push(target);
// If the location doesn't actually need dropping, treat it like
// a regular goto.
let ty = location.ty(callee_body, tcx).subst(tcx, callsite.substs).ty;
if ty.needs_drop(tcx, param_env) {
cost += CALL_PENALTY;
if let Some(unwind) = unwind {
work_list.push(unwind);
}
} else {
cost += INSTR_COST;
}
}
TerminatorKind::Unreachable |
TerminatorKind::Call { destination: None, .. } if first_block => {
// If the function always diverges, don't inline
// unless the cost is zero
threshold = 0;
}
TerminatorKind::Call {func: Operand::Constant(ref f), .. } => {
if let ty::FnDef(def_id, _) = f.literal.ty.kind {
// Don't give intrinsics the extra penalty for calls
let f = tcx.fn_sig(def_id);
if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
cost += INSTR_COST;
} else {
cost += CALL_PENALTY;
}
}
}
TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
_ => cost += INSTR_COST
}
if !is_drop {
for &succ in term.successors() {
work_list.push(succ);
}
}
first_block = false;
}
// Count up the cost of local variables and temps, if we know the size
// use that, otherwise we use a moderately-large dummy cost.
let ptr_size = tcx.data_layout.pointer_size.bytes();
for v in callee_body.vars_and_temps_iter() {
let v = &callee_body.local_decls[v];
let ty = v.ty.subst(tcx, callsite.substs);
// Cost of the var is the size in machine-words, if we know
// it.
if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
cost += (size / ptr_size) as usize;
} else {
cost += UNKNOWN_SIZE_COST;
}
}
if let attr::InlineAttr::Always = codegen_fn_attrs.inline {
debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
true
} else {
if cost <= threshold {
debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
true
} else {
debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
false
}
}
}
fn inline_call(&self,
callsite: CallSite<'tcx>,
caller_body: &mut BodyCache<'tcx>,
mut callee_body: BodyCache<'tcx>) -> bool {
let terminator = caller_body[callsite.bb].terminator.take().unwrap();
match terminator.kind {
// FIXME: Handle inlining of diverging calls
TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
debug!("inlined {:?} into {:?}", callsite.callee, self.source);
let mut local_map = IndexVec::with_capacity(callee_body.local_decls.len());
let mut scope_map = IndexVec::with_capacity(callee_body.source_scopes.len());
for mut scope in callee_body.source_scopes.iter().cloned() {
if scope.parent_scope.is_none() {
scope.parent_scope = Some(callsite.location.scope);
// FIXME(eddyb) is this really needed?
// (also note that it's always overwritten below)
scope.span = callee_body.span;
}
// FIXME(eddyb) this doesn't seem right at all.
// The inlined source scopes should probably be annotated as
// such, but also contain all of the original information.
scope.span = callsite.location.span;
let idx = caller_body.source_scopes.push(scope);
scope_map.push(idx);
}
for loc in callee_body.vars_and_temps_iter() {
let mut local = callee_body.local_decls[loc].clone();
local.source_info.scope =
scope_map[local.source_info.scope];
local.source_info.span = callsite.location.span;
let idx = caller_body.local_decls.push(local);
local_map.push(idx);
}
// If the call is something like `a[*i] = f(i)`, where
// `i : &mut usize`, then just duplicating the `a[*i]`
// Place could result in two different locations if `f`
// writes to `i`. To prevent this we need to create a temporary
// borrow of the place and pass the destination as `*temp` instead.
fn dest_needs_borrow(place: &Place<'_>) -> bool {
for elem in place.projection.iter() {
match elem {
ProjectionElem::Deref |
ProjectionElem::Index(_) => return true,
_ => {}
}
}
match place.base {
// Static variables need a borrow because the callee
// might modify the same static.
PlaceBase::Static(_) => true,
_ => false
}
}
let dest = if dest_needs_borrow(&destination.0) {
debug!("creating temp for return destination");
let dest = Rvalue::Ref(
self.tcx.lifetimes.re_erased,
BorrowKind::Mut { allow_two_phase_borrow: false },
destination.0);
let ty = dest.ty(&**caller_body, self.tcx);
let temp = LocalDecl::new_temp(ty, callsite.location.span);
let tmp = caller_body.local_decls.push(temp);
let tmp = Place::from(tmp);
let stmt = Statement {
source_info: callsite.location,
kind: StatementKind::Assign(box(tmp.clone(), dest))
};
caller_body[callsite.bb]
.statements.push(stmt);
self.tcx.mk_place_deref(tmp)
} else {
destination.0
};
let return_block = destination.1;
// Copy the arguments if needed.
let args: Vec<_> = self.make_call_args(args, &callsite, caller_body);
let bb_len = caller_body.basic_blocks().len();
let mut integrator = Integrator {
block_idx: bb_len,
args: &args,
local_map,
scope_map,
destination: dest,
return_block,
cleanup_block: cleanup,
in_cleanup_block: false,
tcx: self.tcx,
};
for mut var_debug_info in callee_body.var_debug_info.drain(..) {
integrator.visit_var_debug_info(&mut var_debug_info);
caller_body.var_debug_info.push(var_debug_info);
}
for (bb, mut block) in callee_body.basic_blocks_mut().drain_enumerated(..) {
integrator.visit_basic_block_data(bb, &mut block);
caller_body.basic_blocks_mut().push(block);
}
let terminator = Terminator {
source_info: callsite.location,
kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
};
caller_body[callsite.bb].terminator = Some(terminator);
true
}
kind => {
caller_body[callsite.bb].terminator = Some(Terminator {
source_info: terminator.source_info,
kind,
});
false
}
}
}
fn make_call_args(
&self,
args: Vec<Operand<'tcx>>,
callsite: &CallSite<'tcx>,
caller_body: &mut BodyCache<'tcx>,
) -> Vec<Local> {
let tcx = self.tcx;
// There is a bit of a mismatch between the *caller* of a closure and the *callee*.
// The caller provides the arguments wrapped up in a tuple:
//
// tuple_tmp = (a, b, c)
// Fn::call(closure_ref, tuple_tmp)
//
// meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
// as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
// the job of unpacking this tuple. But here, we are codegen. =) So we want to create
// a vector like
//
// [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
//
// Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
// if we "spill" that into *another* temporary, so that we can map the argument
// variable in the callee MIR directly to an argument variable on our side.
// So we introduce temporaries like:
//
// tmp0 = tuple_tmp.0
// tmp1 = tuple_tmp.1
// tmp2 = tuple_tmp.2
//
// and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
if tcx.is_closure(callsite.callee) {
let mut args = args.into_iter();
let self_
= self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
let tuple
= self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
assert!(args.next().is_none());
let tuple = Place::from(tuple);
let tuple_tys = if let ty::Tuple(s) = tuple.ty(&**caller_body, tcx).ty.kind {
s
} else {
bug!("Closure arguments are not passed as a tuple");
};
// The `closure_ref` in our example above.
let closure_ref_arg = iter::once(self_);
// The `tmp0`, `tmp1`, and `tmp2` in our example abonve.
let tuple_tmp_args =
tuple_tys.iter().enumerate().map(|(i, ty)| {
// This is e.g., `tuple_tmp.0` in our example above.
let tuple_field = Operand::Move(tcx.mk_place_field(
tuple.clone(),
Field::new(i),
ty.expect_ty(),
));
// Spill to a local to make e.g., `tmp0`.
self.create_temp_if_necessary(tuple_field, callsite, caller_body)
});
closure_ref_arg.chain(tuple_tmp_args).collect()
} else {
args.into_iter()
.map(|a| self.create_temp_if_necessary(a, callsite, caller_body))
.collect()
}
}
/// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
/// temporary `T` and an instruction `T = arg`, and returns `T`.
fn create_temp_if_necessary(
&self,
arg: Operand<'tcx>,
callsite: &CallSite<'tcx>,
caller_body: &mut BodyCache<'tcx>,
) -> Local {
// FIXME: Analysis of the usage of the arguments to avoid
// unnecessary temporaries.
if let Operand::Move(place) = &arg {
if let Some(local) = place.as_local() {
if caller_body.local_kind(local) == LocalKind::Temp {
// Reuse the operand if it's a temporary already
return local;
}
}
}
debug!("creating temp for argument {:?}", arg);
// Otherwise, create a temporary for the arg
let arg = Rvalue::Use(arg);
let ty = arg.ty(&**caller_body, self.tcx);
let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
let arg_tmp = caller_body.local_decls.push(arg_tmp);
let stmt = Statement {
source_info: callsite.location,
kind: StatementKind::Assign(box(Place::from(arg_tmp), arg)),
};
caller_body[callsite.bb].statements.push(stmt);
arg_tmp
}
}
fn type_size_of<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
) -> Option<u64> {
tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}
/**
* Integrator.
*
* Integrates blocks from the callee function into the calling function.
* Updates block indices, references to locals and other control flow
* stuff.
*/
struct Integrator<'a, 'tcx> {
block_idx: usize,
args: &'a [Local],
local_map: IndexVec<Local, Local>,
scope_map: IndexVec<SourceScope, SourceScope>,
destination: Place<'tcx>,
return_block: BasicBlock,
cleanup_block: Option<BasicBlock>,
in_cleanup_block: bool,
tcx: TyCtxt<'tcx>,
}
impl<'a, 'tcx> Integrator<'a, 'tcx> {
fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
let new = BasicBlock::new(tgt.index() + self.block_idx);
debug!("updating target `{:?}`, new: `{:?}`", tgt, new);
new
}
fn make_integrate_local(&self, local: &Local) -> Local {
if *local == RETURN_PLACE {
match self.destination.as_local() {
Some(l) => return l,
ref place => bug!("Return place is {:?}, not local", place),
}
}
let idx = local.index() - 1;
if idx < self.args.len() {
return self.args[idx];
}
self.local_map[Local::new(idx - self.args.len())]
}
}
impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_local(
&mut self,
local: &mut Local,
_ctxt: PlaceContext,
_location: Location,
) {
*local = self.make_integrate_local(local);
}
fn visit_place(
&mut self,
place: &mut Place<'tcx>,
context: PlaceContext,
location: Location,
) {
if let Some(RETURN_PLACE) = place.as_local() {
// Return pointer; update the place itself
*place = self.destination.clone();
} else {
self.super_place(place, context, location);
}
}
fn process_projection_elem(
&mut self,
elem: &PlaceElem<'tcx>,
) -> Option<PlaceElem<'tcx>> {
if let PlaceElem::Index(local) = elem {
let new_local = self.make_integrate_local(local);
if new_local != *local {
return Some(PlaceElem::Index(new_local))
}
}
None
}
fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
self.in_cleanup_block = data.is_cleanup;
self.super_basic_block_data(block, data);
self.in_cleanup_block = false;
}
fn visit_retag(
&mut self,
kind: &mut RetagKind,
place: &mut Place<'tcx>,
loc: Location,
) {
self.super_retag(kind, place, loc);
// We have to patch all inlined retags to be aware that they are no longer
// happening on function entry.
if *kind == RetagKind::FnEntry {
*kind = RetagKind::Default;
}
}
fn visit_terminator_kind(&mut self,
kind: &mut TerminatorKind<'tcx>, loc: Location) {
self.super_terminator_kind(kind, loc);
match *kind {
TerminatorKind::GeneratorDrop |
TerminatorKind::Yield { .. } => bug!(),
TerminatorKind::Goto { ref mut target} => {
*target = self.update_target(*target);
}
TerminatorKind::SwitchInt { ref mut targets, .. } => {
for tgt in targets {
*tgt = self.update_target(*tgt);
}
}
TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
*target = self.update_target(*target);
if let Some(tgt) = *unwind {
*unwind = Some(self.update_target(tgt));
} else if !self.in_cleanup_block {
// Unless this drop is in a cleanup block, add an unwind edge to
// the original call's cleanup block
*unwind = self.cleanup_block;
}
}
TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
if let Some((_, ref mut tgt)) = *destination {
*tgt = self.update_target(*tgt);
}
if let Some(tgt) = *cleanup {
*cleanup = Some(self.update_target(tgt));
} else if !self.in_cleanup_block {
// Unless this call is in a cleanup block, add an unwind edge to
// the original call's cleanup block
*cleanup = self.cleanup_block;
}
}
TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
*target = self.update_target(*target);
if let Some(tgt) = *cleanup {
*cleanup = Some(self.update_target(tgt));
} else if !self.in_cleanup_block {
// Unless this assert is in a cleanup block, add an unwind edge to
// the original call's cleanup block
*cleanup = self.cleanup_block;
}
}
TerminatorKind::Return => {
*kind = TerminatorKind::Goto { target: self.return_block };
}
TerminatorKind::Resume => {
if let Some(tgt) = self.cleanup_block {
*kind = TerminatorKind::Goto { target: tgt }
}
}
TerminatorKind::Abort => { }
TerminatorKind::Unreachable => { }
TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_target } => {
*real_target = self.update_target(*real_target);
*imaginary_target = self.update_target(*imaginary_target);
}
TerminatorKind::FalseUnwind { real_target: _ , unwind: _ } =>
// see the ordering of passes in the optimized_mir query.
bug!("False unwinds should have been removed before inlining")
}
}
fn visit_source_scope(&mut self, scope: &mut SourceScope) {
*scope = self.scope_map[*scope];
}
}
| 38.246305 | 99 | 0.502318 |
036cfe157b449367299a853c95e759aa51c6c2e5 | 6,678 | //! Fully integrated benchmarks for rust-analyzer, which load real cargo
//! projects.
//!
//! The benchmark here is used to debug specific performance regressions. If you
//! notice that, eg, completion is slow in some specific case, you can modify
//! code here exercise this specific completion, and thus have a fast
//! edit/compile/test cycle.
//!
//! Note that "Rust Analyzer: Run" action does not allow running a single test
//! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
//! which you can use to paste the command in terminal and add `--release` manually.
use std::{convert::TryFrom, sync::Arc};
use ide::{Change, CompletionConfig, FilePosition, TextSize};
use ide_db::helpers::{
insert_use::{ImportGranularity, InsertUseConfig},
SnippetCap,
};
use project_model::CargoConfig;
use test_utils::project_root;
use vfs::{AbsPathBuf, VfsPath};
use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
#[test]
fn integrated_highlighting_benchmark() {
    // Opt-in only: this loads an entire cargo workspace and is far too slow
    // for a normal test run.
    if std::env::var("RUN_SLOW_BENCHES").is_err() {
        return;
    }
    // Load rust-analyzer itself.
    let workspace_to_load = project_root();
    let file = "./crates/ide_db/src/apply_change.rs";
    let cargo_config = CargoConfig::default();
    let load_cargo_config = LoadCargoConfig {
        load_out_dirs_from_check: true,
        with_proc_macro: false,
        prefill_caches: false,
    };
    let (mut host, vfs, _proc_macro) = {
        let _it = stdx::timeit("workspace loading");
        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
    };
    // Resolve the on-disk path to the VFS file id the analysis layer uses.
    let file_id = {
        let file = workspace_to_load.join(file);
        let path = VfsPath::from(AbsPathBuf::assert(file));
        vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
    };
    // Warm up caches before enabling profiling, so only the re-highlight
    // after the edit is measured.
    {
        let _it = stdx::timeit("initial");
        let analysis = host.analysis();
        analysis.highlight_as_html(file_id, false).unwrap();
    }
    profile::init_from("*>100");
    // let _s = profile::heartbeat_span();
    // Apply a trivial edit (append a dummy fn) to invalidate the file.
    {
        let _it = stdx::timeit("change");
        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
        text.push_str("\npub fn _dummy() {}\n");
        let mut change = Change::new();
        change.change_file(file_id, Some(Arc::new(text)));
        host.apply_change(change);
    }
    // The measured run: highlighting after the change.
    {
        let _it = stdx::timeit("after change");
        let _span = profile::cpu_span();
        let analysis = host.analysis();
        analysis.highlight_as_html(file_id, false).unwrap();
    }
}
#[test]
fn integrated_completion_benchmark() {
    // Opt-in only: this loads an entire cargo workspace and is far too slow
    // for a normal test run.
    if std::env::var("RUN_SLOW_BENCHES").is_err() {
        return;
    }

    // Completion configuration shared by both measured scenarios below.
    // Previously this literal was duplicated verbatim for each scenario,
    // which invited the two copies drifting apart; keep it in one place.
    fn completion_config() -> CompletionConfig {
        CompletionConfig {
            enable_postfix_completions: true,
            enable_imports_on_the_fly: true,
            enable_self_on_the_fly: true,
            add_call_parenthesis: true,
            add_call_argument_snippets: true,
            snippet_cap: SnippetCap::new(true),
            insert_use: InsertUseConfig {
                granularity: ImportGranularity::Crate,
                prefix_kind: hir::PrefixKind::ByCrate,
                enforce_granularity: true,
                group: true,
                skip_glob_imports: true,
            },
        }
    }

    // Load rust-analyzer itself.
    let workspace_to_load = project_root();
    let file = "./crates/hir/src/lib.rs";
    let cargo_config = CargoConfig::default();
    let load_cargo_config = LoadCargoConfig {
        load_out_dirs_from_check: true,
        with_proc_macro: false,
        prefill_caches: true,
    };
    let (mut host, vfs, _proc_macro) = {
        let _it = stdx::timeit("workspace loading");
        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
    };
    let file_id = {
        let file = workspace_to_load.join(file);
        let path = VfsPath::from(AbsPathBuf::assert(file));
        vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
    };
    // Warm up caches before enabling profiling.
    {
        let _it = stdx::timeit("initial");
        let analysis = host.analysis();
        analysis.highlight_as_html(file_id, false).unwrap();
    }
    profile::init_from("*>5");
    // let _s = profile::heartbeat_span();

    // Scenario 1: unqualified-path completion right after typing `sel`.
    let completion_offset = {
        let _it = stdx::timeit("change");
        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
        let completion_offset =
            patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
                + "sel".len();
        let mut change = Change::new();
        change.change_file(file_id, Some(Arc::new(text)));
        host.apply_change(change);
        completion_offset
    };
    {
        let _p = profile::span("unqualified path completion");
        let _span = profile::cpu_span();
        let analysis = host.analysis();
        let position =
            FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
        analysis.completions(&completion_config(), position).unwrap();
    }

    // Scenario 2: dot (field/method) completion right after typing `self.`.
    // Note the `from` pattern relies on the edit made by scenario 1.
    let completion_offset = {
        let _it = stdx::timeit("change");
        let mut text = host.analysis().file_text(file_id).unwrap().to_string();
        let completion_offset =
            patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
                + "self.".len();
        let mut change = Change::new();
        change.change_file(file_id, Some(Arc::new(text)));
        host.apply_change(change);
        completion_offset
    };
    {
        let _p = profile::span("dot completion");
        let _span = profile::cpu_span();
        let analysis = host.analysis();
        let position =
            FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
        analysis.completions(&completion_config(), position).unwrap();
    }
}
/// Replaces the first occurrence of `from` in `what` with `to`, returning the
/// byte offset at which the replacement happened.
///
/// Panics if `from` does not occur in `what` (a bug in the benchmark itself).
fn patch(what: &mut String, from: &str, to: &str) -> usize {
    let offset = what.find(from).unwrap();
    let patched = what.replacen(from, to, 1);
    *what = patched;
    offset
}
| 34.246154 | 98 | 0.613807 |
0a3446e13b1dbbbd8eccf88ee7eb02dca9d3f18b | 1,529 | use core::iter;
use ockam_core::{route, Address, Result, Routed, Worker};
use ockam_node::Context;
use rand::Rng;
use ockam_transport_tcp::{TcpTransport, TCP};
#[ockam_macros::test]
async fn send_receive(ctx: &mut Context) -> Result<()> {
    // Pick a random high port so concurrent test runs don't collide on the
    // bind address.
    let rand_port = rand::thread_rng().gen_range(10000, 65535);
    let bind_address = format!("127.0.0.1:{}", rand_port);
    let bind_address = bind_address.as_str();
    // Listener side: a TCP transport plus an "echoer" worker that bounces
    // every message straight back to its sender.
    let _listener = {
        let transport = TcpTransport::create(ctx).await?;
        transport.listen(bind_address).await?;
        ctx.start_worker("echoer", Echoer).await?;
    };
    // Sender side: send a random 10-character alphanumeric message over TCP
    // to the echoer and expect the identical string back.
    let _sender = {
        let mut ctx = ctx.new_context(Address::random(0)).await?;
        let msg: String = {
            let mut rng = rand::thread_rng();
            iter::repeat(())
                .map(|()| rng.sample(&rand::distributions::Alphanumeric))
                .take(10)
                .collect()
        };
        let r = route![(TCP, bind_address), "echoer"];
        ctx.send(r, msg.clone()).await?;
        let reply = ctx.receive::<String>().await?;
        assert_eq!(reply, msg, "Should receive the same message");
    };
    // Best-effort shutdown; a failed stop should not fail the test.
    if let Err(e) = ctx.stop().await {
        println!("Unclean stop: {}", e)
    }
    Ok(())
}
/// Test worker that replies to every incoming message with its own body.
pub struct Echoer;
#[ockam_core::worker]
impl Worker for Echoer {
    type Message = String;
    type Context = Context;
    async fn handle_message(&mut self, ctx: &mut Context, msg: Routed<String>) -> Result<()> {
        // Send the body straight back along the message's return route.
        ctx.send(msg.return_route(), msg.body()).await
    }
}
d7adf4ff5dd2df6e5277cb093ec1dedfad6d7665 | 52,471 | use std::convert::TryFrom;
use std::io::{self, Cursor, Read, Seek, SeekFrom};
use std::iter::{repeat, Iterator, Rev};
use std::marker::PhantomData;
use std::slice::ChunksMut;
use std::{cmp, mem};
use std::cmp::Ordering;
use byteorder::{LittleEndian, ReadBytesExt};
use crate::color::ColorType;
use crate::error::{
DecodingError, ImageError, ImageResult, UnsupportedError, UnsupportedErrorKind,
};
use crate::image::{self, ImageDecoder, ImageDecoderExt, ImageFormat, Progress};
// On-disk sizes (in bytes) of the successive DIB header revisions; the size
// field doubles as the header's version tag (see `read_metadata`).
const BITMAPCOREHEADER_SIZE: u32 = 12;
const BITMAPINFOHEADER_SIZE: u32 = 40;
const BITMAPV2HEADER_SIZE: u32 = 52;
const BITMAPV3HEADER_SIZE: u32 = 56;
const BITMAPV4HEADER_SIZE: u32 = 108;
const BITMAPV5HEADER_SIZE: u32 = 124;
// Lookup tables widening 3/4/5/6-bit channel values to the full 0-255 range.
static LOOKUP_TABLE_3_BIT_TO_8_BIT: [u8; 8] = [0, 36, 73, 109, 146, 182, 219, 255];
static LOOKUP_TABLE_4_BIT_TO_8_BIT: [u8; 16] = [
    0, 17, 34, 51, 68, 85, 102, 119, 136, 153, 170, 187, 204, 221, 238, 255,
];
static LOOKUP_TABLE_5_BIT_TO_8_BIT: [u8; 32] = [
    0, 8, 16, 25, 33, 41, 49, 58, 66, 74, 82, 90, 99, 107, 115, 123, 132, 140, 148, 156, 165, 173,
    181, 189, 197, 206, 214, 222, 230, 239, 247, 255,
];
static LOOKUP_TABLE_6_BIT_TO_8_BIT: [u8; 64] = [
    0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 45, 49, 53, 57, 61, 65, 69, 73, 77, 81, 85, 89, 93,
    97, 101, 105, 109, 113, 117, 121, 125, 130, 134, 138, 142, 146, 150, 154, 158, 162, 166, 170,
    174, 178, 182, 186, 190, 194, 198, 202, 206, 210, 215, 219, 223, 227, 231, 235, 239, 243, 247,
    251, 255,
];
// Default channel layout for 16-bit pixels: 5 bits per color channel, no alpha.
static R5_G5_B5_COLOR_MASK: Bitfields = Bitfields {
    r: Bitfield { len: 5, shift: 10 },
    g: Bitfield { len: 5, shift: 5 },
    b: Bitfield { len: 5, shift: 0 },
    a: Bitfield { len: 0, shift: 0 },
};
// Default channel layout for 32-bit pixels: 8 bits per color channel, no alpha.
const R8_G8_B8_COLOR_MASK: Bitfields = Bitfields {
    r: Bitfield { len: 8, shift: 24 },
    g: Bitfield { len: 8, shift: 16 },
    b: Bitfield { len: 8, shift: 8 },
    a: Bitfield { len: 0, shift: 0 },
};
// RLE control bytes: a zero byte introduces an escape; the following byte
// selects end-of-line, end-of-file or a positional delta.
const RLE_ESCAPE: u8 = 0;
const RLE_ESCAPE_EOL: u8 = 0;
const RLE_ESCAPE_EOF: u8 = 1;
const RLE_ESCAPE_DELTA: u8 = 2;
/// The maximum width/height the decoder will process.
const MAX_WIDTH_HEIGHT: i32 = 0xFFFF;
/// How the bitmap's pixel data is encoded, derived from the header's bit
/// count and compression fields.
#[derive(PartialEq, Copy, Clone)]
enum ImageType {
    Palette,
    RGB16,
    RGB24,
    RGB32,
    RGBA32,
    RLE8,
    RLE4,
    Bitfields16,
    Bitfields32,
}
/// Which DIB header revision the file carries (see the *_SIZE constants).
#[derive(PartialEq)]
enum BMPHeaderType {
    Core,
    Info,
    V2,
    V3,
    V4,
    V5,
}
/// Pixel layouts for formats that use whole bytes per channel.
#[derive(PartialEq)]
enum FormatFullBytes {
    RGB24,
    RGB32,
    RGBA32,
    Format888,
}
/// Direction of row iteration over the pixel buffer: top-to-bottom, or
/// bottom-to-top (the usual BMP storage order).
enum Chunker<'a> {
    FromTop(ChunksMut<'a, u8>),
    FromBottom(Rev<ChunksMut<'a, u8>>),
}
/// Iterator over the rows of a pixel buffer in the order they appear in the
/// file (see `BmpDecoder::rows`).
pub(crate) struct RowIterator<'a> {
    chunks: Chunker<'a>,
}
impl<'a> Iterator for RowIterator<'a> {
    type Item = &'a mut [u8];
    #[inline(always)]
    fn next(&mut self) -> Option<&'a mut [u8]> {
        // Delegate to whichever direction was chosen at construction.
        match self.chunks {
            Chunker::FromTop(ref mut chunks) => chunks.next(),
            Chunker::FromBottom(ref mut chunks) => chunks.next(),
        }
    }
}
/// Convenience function to check if the combination of width, length and number of
/// channels would result in a buffer that would overflow.
///
/// Returns `Err` when `num_bytes` reports that the size is negative or does
/// not fit in a `usize`.
fn check_for_overflow(width: i32, length: i32, channels: usize) -> ImageResult<()> {
    num_bytes(width, length, channels)
        .map(|_| ())
        .ok_or_else(|| {
            ImageError::Decoding(DecodingError::with_message(
                // Report against the BMP format: this is the BMP decoder, and
                // every other error here is tagged `Bmp` (`Png` was a
                // copy-paste slip).
                ImageFormat::Bmp.into(),
                "Image would require a buffer that is too large to be represented!".to_owned(),
            ))
        })
}
/// Calculate how many bytes a buffer holding a decoded image with these properties would
/// require. Returns `None` if the buffer size would overflow or if one of the sizes are negative.
fn num_bytes(width: i32, length: i32, channels: usize) -> Option<usize> {
    if width <= 0 || length <= 0 {
        None
    } else {
        // Checked arithmetic so absurd dimensions yield `None` instead of
        // wrapping; `and_then` replaces the manual `match` on the first product.
        channels
            .checked_mul(width as usize)
            .and_then(|row_bytes| row_bytes.checked_mul(length as usize))
    }
}
/// The maximum starting number of pixels in the pixel buffer, might want to tweak this.
///
/// For images that specify large sizes, we don't allocate the full buffer right away
/// to somewhat mitigate trying to make the decoder run out of memory by sending a bogus image.
/// This is somewhat of a workaround as ideally we would check against the expected file size,
/// but that's not possible through the Read and Seek traits alone and would require the decoder
/// to be provided with it by the caller.
///
/// NOTE: This is multiplied by 3 or 4 depending on the number of channels to get the maximum
/// starting buffer size. This amounts to about 134 mb for a buffer with 4 channels.
const MAX_INITIAL_PIXELS: usize = 8192 * 4096;
/// Sets every byte yielded by a mutable iterator over byte slices to 0.
fn blank_bytes<'a, T: Iterator<Item = &'a mut [u8]>>(iterator: T) {
    // Flatten the slices into one stream of byte references and zero them all.
    iterator.flatten().for_each(|byte| *byte = 0);
}
/// Extend the buffer to `full_size`, copying existing data to the end of the buffer. Returns slice
/// pointing to the part of the buffer that is not yet filled in.
///
/// If blank is true, the bytes in the new buffer that are not filled in are set to 0.
/// This is used for rle-encoded images as the decoding process for these may not fill in all the
/// pixels.
///
/// As BMP images are usually stored with the rows upside-down we have to write the image data
/// starting at the end of the buffer and thus we have to make sure the existing data is put at the
/// end of the buffer.
#[inline(never)]
#[cold]
fn extend_buffer(buffer: &mut Vec<u8>, full_size: usize, blank: bool) -> &mut [u8] {
    let old_size = buffer.len();
    let extend = full_size - buffer.len();
    // New space is 0xFF-filled, matching the initial fill in
    // `create_pixel_data`.
    buffer.extend(repeat(0xFF).take(extend));
    assert_eq!(buffer.len(), full_size);
    let ret = if extend >= old_size {
        // If the full buffer length is more or equal to twice the initial one, we can simply
        // copy the data in the lower part of the buffer to the end of it and input from there.
        let (new, old) = buffer.split_at_mut(extend);
        old.copy_from_slice(&new[..old_size]);
        new
    } else {
        // If the full size is less than twice the initial buffer, we have to
        // copy in two steps
        let overlap = old_size - extend;
        // First we copy the data that fits into the bit we extended.
        let (lower, upper) = buffer.split_at_mut(old_size);
        upper.copy_from_slice(&lower[overlap..]);
        // Then we slide the data that hasn't been copied yet to the top of the buffer
        let (new, old) = lower.split_at_mut(extend);
        old[..overlap].copy_from_slice(&new[..overlap]);
        new
    };
    // For RLE images, unwritten pixels must read as 0 rather than 0xFF.
    if blank {
        for b in ret.iter_mut() {
            *b = 0;
        }
    };
    ret
}
/// Call the provided function on each row of the provided buffer, returning Err if the provided
/// function returns an error, extends the buffer if it's not large enough.
///
/// Rows are visited in file order: bottom-up buffers are iterated in reverse
/// so `func` always sees rows in the order they occur in the stream.
fn with_rows<F>(
    buffer: &mut Vec<u8>,
    width: i32,
    height: i32,
    channels: usize,
    top_down: bool,
    mut func: F,
) -> io::Result<()>
where
    F: FnMut(&mut [u8]) -> io::Result<()>,
{
    // An overflow should already have been checked for when this is called,
    // though we check anyhow, as it somehow seems to increase performance slightly.
    let row_width = channels.checked_mul(width as usize).unwrap();
    let full_image_size = row_width.checked_mul(height as usize).unwrap();
    if !top_down {
        for row in buffer.chunks_mut(row_width).rev() {
            func(row)?;
        }
        // If we need more space, extend the buffer.
        if buffer.len() < full_image_size {
            let new_space = extend_buffer(buffer, full_image_size, false);
            for row in new_space.chunks_mut(row_width).rev() {
                func(row)?;
            }
        }
    } else {
        for row in buffer.chunks_mut(row_width) {
            func(row)?;
        }
        if buffer.len() < full_image_size {
            // If the image is stored in top-down order, we can simply use the extend function
            // from vec to extend the buffer..
            let extend = full_image_size - buffer.len();
            buffer.extend(repeat(0xFF).take(extend));
            let len = buffer.len();
            for row in buffer[len - row_width..].chunks_mut(row_width) {
                func(row)?;
            }
        };
    }
    Ok(())
}
/// Writes up to `n_pixels` palette entries, selected byte-by-byte from
/// `indices`, into the RGB pixel chunks. Returns `false` if the output
/// iterator was exhausted before the run finished.
fn set_8bit_pixel_run<'a, T: Iterator<Item = &'a u8>>(
    pixel_iter: &mut ChunksMut<u8>,
    palette: &[(u8, u8, u8)],
    indices: T,
    n_pixels: usize,
) -> bool {
    for &index in indices.take(n_pixels) {
        match pixel_iter.next() {
            Some(pixel) => {
                let (red, green, blue) = palette[index as usize];
                pixel[0] = red;
                pixel[1] = green;
                pixel[2] = blue;
            }
            // Ran out of room in the output buffer.
            None => return false,
        }
    }
    true
}
/// Expands 4-bit palette indices (two pixels per byte, high nibble first)
/// into RGB pixels, writing at most `n_pixels`. Returns `false` if the output
/// iterator ran out of pixels first.
fn set_4bit_pixel_run<'a, T: Iterator<Item = &'a u8>>(
    pixel_iter: &mut ChunksMut<u8>,
    palette: &[(u8, u8, u8)],
    indices: T,
    mut n_pixels: usize,
) -> bool {
    for idx in indices {
        // NOTE: the `break` inside the macro exits the outer `for` loop once
        // the requested pixel count has been written.
        macro_rules! set_pixel {
            ($i:expr) => {
                if n_pixels == 0 {
                    break;
                }
                if let Some(pixel) = pixel_iter.next() {
                    let (r, g, b) = palette[$i as usize];
                    pixel[0] = r;
                    pixel[1] = g;
                    pixel[2] = b;
                } else {
                    return false;
                }
                n_pixels -= 1;
            };
        }
        set_pixel!(idx >> 4);
        set_pixel!(idx & 0xf);
    }
    true
}
/// Expands 2-bit palette indices (four pixels per byte, most-significant
/// pair first) into RGB pixels, writing at most `n_pixels`. Returns `false`
/// if the output iterator ran out of pixels first.
#[rustfmt::skip]
fn set_2bit_pixel_run<'a, T: Iterator<Item = &'a u8>>(
    pixel_iter: &mut ChunksMut<u8>,
    palette: &[(u8, u8, u8)],
    indices: T,
    mut n_pixels: usize,
) -> bool {
    for idx in indices {
        // NOTE: the `break` inside the macro exits the outer `for` loop once
        // the requested pixel count has been written.
        macro_rules! set_pixel {
            ($i:expr) => {
                if n_pixels == 0 {
                    break;
                }
                if let Some(pixel) = pixel_iter.next() {
                    let (r, g, b) = palette[$i as usize];
                    pixel[0] = r;
                    pixel[1] = g;
                    pixel[2] = b;
                } else {
                    return false;
                }
                n_pixels -= 1;
            };
        }
        set_pixel!((idx >> 6) & 0x3u8);
        set_pixel!((idx >> 4) & 0x3u8);
        set_pixel!((idx >> 2) & 0x3u8);
        set_pixel!( idx & 0x3u8);
    }
    true
}
/// Expands 1-bit palette indices: each byte of `indices` holds eight pixels,
/// most-significant bit first, selecting palette entry 0 or 1. Stops silently
/// when the output iterator is exhausted.
fn set_1bit_pixel_run<'a, T: Iterator<Item = &'a u8>>(
    pixel_iter: &mut ChunksMut<u8>,
    palette: &[(u8, u8, u8)],
    indices: T,
) {
    for &idx in indices {
        // Walk the bits from most significant to least significant.
        for shift in (0u32..8).rev() {
            match pixel_iter.next() {
                Some(pixel) => {
                    let lit = (idx >> shift) & 1 != 0;
                    let (r, g, b) = palette[lit as usize];
                    pixel[0] = r;
                    pixel[1] = g;
                    pixel[2] = b;
                }
                None => return,
            }
        }
    }
}
/// Position of one color channel inside a packed pixel: `len` significant
/// bits located `shift` bits above the least-significant bit.
#[derive(PartialEq, Eq)]
struct Bitfield {
    shift: u32,
    len: u32,
}
impl Bitfield {
    /// Derives a bitfield from a raw channel mask, validating that the mask
    /// is contiguous and fits within a `max_len`-bit pixel. Masks wider than
    /// 8 bits are truncated to their top 8 bits.
    fn from_mask(mask: u32, max_len: u32) -> ImageResult<Bitfield> {
        if mask == 0 {
            return Ok(Bitfield { shift: 0, len: 0 });
        }
        let mut shift = mask.trailing_zeros();
        let mut len = (!(mask >> shift)).trailing_zeros();
        // A contiguous mask has exactly `len` bits set.
        if len != mask.count_ones() {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "Non-contiguous bitfield mask".to_owned(),
            )));
        }
        if len + shift > max_len {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "Invalid bitfield mask".to_owned(),
            )));
        }
        // Keep only the 8 most significant bits of an over-wide channel.
        if len > 8 {
            shift += len - 8;
            len = 8;
        }
        Ok(Bitfield { shift, len })
    }
    /// Extracts this channel from a packed pixel and widens it to 8 bits.
    fn read(&self, data: u32) -> u8 {
        let data = data >> self.shift;
        match self.len {
            1 => ((data & 0b1) * 0xff) as u8,
            2 => ((data & 0b11) * 0x55) as u8,
            3 => LOOKUP_TABLE_3_BIT_TO_8_BIT[(data & 0b00_0111) as usize],
            4 => LOOKUP_TABLE_4_BIT_TO_8_BIT[(data & 0b00_1111) as usize],
            5 => LOOKUP_TABLE_5_BIT_TO_8_BIT[(data & 0b01_1111) as usize],
            6 => LOOKUP_TABLE_6_BIT_TO_8_BIT[(data & 0b11_1111) as usize],
            7 => ((data & 0x7f) << 1 | (data & 0x7f) >> 6) as u8,
            8 => (data & 0xff) as u8,
            // `from_mask` caps `len` at 8, so other lengths are unreachable.
            _ => panic!(),
        }
    }
}
/// The channel layout of a bitfield-encoded pixel (one `Bitfield` per
/// channel; `a` may be zero-length when there is no alpha).
#[derive(PartialEq, Eq)]
struct Bitfields {
    r: Bitfield,
    g: Bitfield,
    b: Bitfield,
    a: Bitfield,
}
impl Bitfields {
    /// Builds the full channel layout from the four raw masks, requiring all
    /// three color channels to be present (alpha is optional).
    fn from_mask(
        r_mask: u32,
        g_mask: u32,
        b_mask: u32,
        a_mask: u32,
        max_len: u32,
    ) -> ImageResult<Bitfields> {
        let bitfields = Bitfields {
            r: Bitfield::from_mask(r_mask, max_len)?,
            g: Bitfield::from_mask(g_mask, max_len)?,
            b: Bitfield::from_mask(b_mask, max_len)?,
            a: Bitfield::from_mask(a_mask, max_len)?,
        };
        if bitfields.r.len == 0 || bitfields.g.len == 0 || bitfields.b.len == 0 {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "Missing bitfield mask".to_owned(),
            )));
        }
        Ok(bitfields)
    }
}
/// A bmp decoder
pub struct BmpDecoder<R> {
    reader: R,
    bmp_header_type: BMPHeaderType,
    width: i32,  // in pixels; validated non-negative when metadata is read
    height: i32, // in pixels; sign-normalized (see `top_down`)
    data_offset: u64, // byte offset of the pixel array in the stream
    top_down: bool,   // true when rows are stored top-to-bottom
    no_file_header: bool,    // set for ICO-embedded BMPs, which lack "BM"
    add_alpha_channel: bool, // decode to RGBA instead of RGB
    has_loaded_metadata: bool,
    image_type: ImageType,
    bit_count: u16,   // bits per pixel, from the header
    colors_used: u32, // palette entry count from the header (0 = maximum)
    palette: Option<Vec<(u8, u8, u8)>>,
    bitfields: Option<Bitfields>,
}
/// One decoded instruction from an RLE4/RLE8 compressed pixel stream.
enum RLEInsn {
    EndOfFile,
    EndOfRow,
    /// Positional (x, y) delta for the output cursor.
    Delta(u8, u8),
    /// A literal ("absolute") run: pixel count plus the padded index bytes.
    Absolute(u8, Vec<u8>),
    /// Repeat one palette byte the given number of times.
    PixelRun(u8, u8),
}
/// Adapter that lexes raw RLE bytes from `r` into `RLEInsn`s; `image_type`
/// distinguishes RLE4 from RLE8 length handling.
struct RLEInsnIterator<'a, R: 'a + Read> {
    r: &'a mut R,
    image_type: ImageType,
}
impl<'a, R: Read> Iterator for RLEInsnIterator<'a, R> {
    type Item = RLEInsn;

    /// Decodes the next RLE instruction, or returns `None` on any read
    /// failure (including a clean end of stream).
    fn next(&mut self) -> Option<RLEInsn> {
        // `.ok()?` replaces the repeated `match … { Err(_) => return None }`
        // boilerplate: any I/O error simply ends the iteration.
        let control_byte = self.r.read_u8().ok()?;
        match control_byte {
            RLE_ESCAPE => {
                let op = self.r.read_u8().ok()?;
                match op {
                    RLE_ESCAPE_EOL => Some(RLEInsn::EndOfRow),
                    RLE_ESCAPE_EOF => Some(RLEInsn::EndOfFile),
                    RLE_ESCAPE_DELTA => {
                        let xdelta = self.r.read_u8().ok()?;
                        let ydelta = self.r.read_u8().ok()?;
                        Some(RLEInsn::Delta(xdelta, ydelta))
                    }
                    _ => {
                        // Absolute mode: `op` pixels follow literally. RLE4
                        // packs two pixels per byte, and the byte run is
                        // padded to a 16-bit boundary.
                        let mut length = op as usize;
                        if self.image_type == ImageType::RLE4 {
                            length = (length + 1) / 2;
                        }
                        length += length & 1;
                        let mut buffer = vec![0; length];
                        self.r.read_exact(&mut buffer).ok()?;
                        Some(RLEInsn::Absolute(op, buffer))
                    }
                }
            }
            _ => {
                // Encoded mode: repeat one palette byte `control_byte` times.
                let palette_index = self.r.read_u8().ok()?;
                Some(RLEInsn::PixelRun(control_byte, palette_index))
            }
        }
    }
}
impl<R: Read + Seek> BmpDecoder<R> {
    /// Create a new decoder that decodes from the stream ```r```
    pub fn new(reader: R) -> ImageResult<BmpDecoder<R>> {
        // Neutral defaults; `read_metadata` below fills in the real
        // dimensions, image type, palette and bitfields from the headers.
        let mut decoder = BmpDecoder {
            reader,
            bmp_header_type: BMPHeaderType::Info,
            width: 0,
            height: 0,
            data_offset: 0,
            top_down: false,
            no_file_header: false,
            add_alpha_channel: false,
            has_loaded_metadata: false,
            image_type: ImageType::Palette,
            bit_count: 0,
            colors_used: 0,
            palette: None,
            bitfields: None,
        };
        decoder.read_metadata()?;
        Ok(decoder)
    }
#[cfg(feature = "ico")]
pub(crate) fn new_with_ico_format(reader: R) -> ImageResult<BmpDecoder<R>> {
let mut decoder = BmpDecoder {
reader,
bmp_header_type: BMPHeaderType::Info,
width: 0,
height: 0,
data_offset: 0,
top_down: false,
no_file_header: false,
add_alpha_channel: false,
has_loaded_metadata: false,
image_type: ImageType::Palette,
bit_count: 0,
colors_used: 0,
palette: None,
bitfields: None,
};
decoder.read_metadata_in_ico_format()?;
Ok(decoder)
}
#[cfg(feature = "ico")]
pub(crate) fn reader(&mut self) -> &mut R {
&mut self.reader
}
    /// Reads the 14-byte BITMAPFILEHEADER: the "BM" signature, file size,
    /// reserved words and the pixel-data offset. Skipped entirely for
    /// ICO-embedded bitmaps (`no_file_header`).
    fn read_file_header(&mut self) -> ImageResult<()> {
        if self.no_file_header {
            return Ok(());
        }
        let mut signature = [0; 2];
        self.reader.read_exact(&mut signature)?;
        if signature != b"BM"[..] {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "BMP signature not found".to_owned(),
            )));
        }
        // The next 8 bytes hold the file size and 4 reserved bytes; neither
        // is needed for decoding, so both words are read and discarded.
        self.reader.read_u32::<LittleEndian>()?;
        self.reader.read_u32::<LittleEndian>()?;
        self.data_offset = u64::from(self.reader.read_u32::<LittleEndian>()?);
        Ok(())
    }
    /// Read BITMAPCOREHEADER https://msdn.microsoft.com/en-us/library/vs/alm/dd183372(v=vs.85).aspx
    ///
    /// returns Err if any of the values are invalid.
    fn read_bitmap_core_header(&mut self) -> ImageResult<()> {
        // As height/width values in BMP files with core headers are only 16 bits long,
        // they won't be larger than `MAX_WIDTH_HEIGHT`.
        self.width = i32::from(self.reader.read_u16::<LittleEndian>()?);
        self.height = i32::from(self.reader.read_u16::<LittleEndian>()?);
        check_for_overflow(self.width, self.height, self.num_channels())?;
        // Number of planes (format specifies that this should be 1).
        if self.reader.read_u16::<LittleEndian>()? != 1 {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "More than one plane".to_owned(),
            )));
        }
        self.bit_count = self.reader.read_u16::<LittleEndian>()?;
        // Core headers support only palette and uncompressed 24-bit data.
        self.image_type = match self.bit_count {
            1 | 4 | 8 => ImageType::Palette,
            24 => ImageType::RGB24,
            _ => {
                return Err(ImageError::Decoding(DecodingError::with_message(
                    ImageFormat::Bmp.into(),
                    "Invalid bit count".to_owned(),
                )))
            }
        };
        Ok(())
    }
    /// Read BITMAPINFOHEADER https://msdn.microsoft.com/en-us/library/vs/alm/dd183376(v=vs.85).aspx
    /// or BITMAPV{2|3|4|5}HEADER.
    ///
    /// returns Err if any of the values are invalid.
    fn read_bitmap_info_header(&mut self) -> ImageResult<()> {
        self.width = self.reader.read_i32::<LittleEndian>()?;
        self.height = self.reader.read_i32::<LittleEndian>()?;
        // Width can not be negative
        if self.width < 0 {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "Negative width".to_owned(),
            )));
        } else if self.width > MAX_WIDTH_HEIGHT || self.height > MAX_WIDTH_HEIGHT {
            // Limit very large image sizes to avoid OOM issues. Images with these sizes are
            // unlikely to be valid anyhow.
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "Image too large".to_owned(),
            )));
        }
        // Guard against overflow in the `*= -1` sign flip below.
        if self.height == i32::min_value() {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "Invalid height".to_owned(),
            )));
        }
        // A negative height indicates a top-down DIB.
        if self.height < 0 {
            self.height *= -1;
            self.top_down = true;
        }
        check_for_overflow(self.width, self.height, self.num_channels())?;
        // Number of planes (format specifies that this should be 1).
        if self.reader.read_u16::<LittleEndian>()? != 1 {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "More than one plane".to_owned(),
            )));
        }
        self.bit_count = self.reader.read_u16::<LittleEndian>()?;
        let image_type_u32 = self.reader.read_u32::<LittleEndian>()?;
        // Top-down dibs can not be compressed.
        if self.top_down && image_type_u32 != 0 && image_type_u32 != 3 {
            return Err(ImageError::Decoding(DecodingError::with_message(
                ImageFormat::Bmp.into(),
                "Invalid image type for top-down image.".to_owned(),
            )));
        }
        // Combine the compression field with the bit count to select the
        // concrete decode path.
        self.image_type = match image_type_u32 {
            0 => match self.bit_count {
                1 | 2 | 4 | 8 => ImageType::Palette,
                16 => ImageType::RGB16,
                24 => ImageType::RGB24,
                32 if self.add_alpha_channel => ImageType::RGBA32,
                32 => ImageType::RGB32,
                _ => {
                    return Err(ImageError::Decoding(DecodingError::with_message(
                        ImageFormat::Bmp.into(),
                        format!("Invalid RGB bit count {}", self.bit_count),
                    )))
                }
            },
            1 => match self.bit_count {
                8 => ImageType::RLE8,
                _ => {
                    return Err(ImageError::Decoding(DecodingError::with_message(
                        ImageFormat::Bmp.into(),
                        "Invalid RLE8 bit count".to_owned(),
                    )))
                }
            },
            2 => match self.bit_count {
                4 => ImageType::RLE4,
                _ => {
                    return Err(ImageError::Decoding(DecodingError::with_message(
                        ImageFormat::Bmp.into(),
                        "Invalid RLE4 bit count".to_owned(),
                    )))
                }
            },
            3 => match self.bit_count {
                16 => ImageType::Bitfields16,
                32 => ImageType::Bitfields32,
                _ => {
                    return Err(ImageError::Decoding(DecodingError::with_message(
                        ImageFormat::Bmp.into(),
                        "Invalid bitfields bit count".to_owned(),
                    )))
                }
            },
            4 => {
                // JPEG compression is not implemented yet.
                return Err(ImageError::Unsupported(
                    UnsupportedError::from_format_and_kind(
                        ImageFormat::Bmp.into(),
                        UnsupportedErrorKind::GenericFeature("JPEG compression".to_owned()),
                    ),
                ));
            }
            5 => {
                // PNG compression is not implemented yet.
                return Err(ImageError::Unsupported(
                    UnsupportedError::from_format_and_kind(
                        ImageFormat::Bmp.into(),
                        UnsupportedErrorKind::GenericFeature("PNG compression".to_owned()),
                    ),
                ));
            }
            11 | 12 | 13 => {
                // CMYK types are not implemented yet.
                return Err(ImageError::Unsupported(
                    UnsupportedError::from_format_and_kind(
                        ImageFormat::Bmp.into(),
                        UnsupportedErrorKind::GenericFeature("CMYK format".to_owned()),
                    ),
                ));
            }
            _ => {
                // Unknown compression type.
                return Err(ImageError::Decoding(DecodingError::with_message(
                    ImageFormat::Bmp.into(),
                    format!("Unknown image compression type {}", image_type_u32),
                )));
            }
        };
        // The next 12 bytes hold the data-array size in bytes followed by the
        // horizontal and vertical printing resolutions. The pixel array size
        // is computed from width & height instead, and the resolutions are
        // not needed, so all three words are read and discarded.
        self.reader.read_u32::<LittleEndian>()?;
        self.reader.read_u32::<LittleEndian>()?;
        self.reader.read_u32::<LittleEndian>()?;
        self.colors_used = self.reader.read_u32::<LittleEndian>()?;
        // The next 4 bytes represent number of "important" colors
        // We're not interested in this value, so we'll skip it
        self.reader.read_u32::<LittleEndian>()?;
        Ok(())
    }
    /// Reads the R/G/B (and, for V3+ headers, A) channel masks used by
    /// bitfield-compressed images and converts them into `Bitfields`.
    fn read_bitmasks(&mut self) -> ImageResult<()> {
        let r_mask = self.reader.read_u32::<LittleEndian>()?;
        let g_mask = self.reader.read_u32::<LittleEndian>()?;
        let b_mask = self.reader.read_u32::<LittleEndian>()?;
        // Only V3 and later headers carry an alpha mask.
        let a_mask = match self.bmp_header_type {
            BMPHeaderType::V3 | BMPHeaderType::V4 | BMPHeaderType::V5 => {
                self.reader.read_u32::<LittleEndian>()?
            }
            _ => 0,
        };
        self.bitfields = match self.image_type {
            ImageType::Bitfields16 => {
                Some(Bitfields::from_mask(r_mask, g_mask, b_mask, a_mask, 16)?)
            }
            ImageType::Bitfields32 => {
                Some(Bitfields::from_mask(r_mask, g_mask, b_mask, a_mask, 32)?)
            }
            _ => None,
        };
        // A non-zero alpha mask means the output gets a fourth channel.
        if self.bitfields.is_some() && a_mask != 0 {
            self.add_alpha_channel = true;
        }
        Ok(())
    }
    /// Parses the file header (unless suppressed for ICO), identifies and
    /// parses the DIB header, then reads bitmasks and/or the palette as the
    /// image type requires. Idempotent: subsequent calls are no-ops.
    fn read_metadata(&mut self) -> ImageResult<()> {
        if !self.has_loaded_metadata {
            self.read_file_header()?;
            let bmp_header_offset = self.reader.seek(SeekFrom::Current(0))?;
            let bmp_header_size = self.reader.read_u32::<LittleEndian>()?;
            let bmp_header_end = bmp_header_offset + u64::from(bmp_header_size);
            // The header's declared size doubles as its version tag.
            self.bmp_header_type = match bmp_header_size {
                BITMAPCOREHEADER_SIZE => BMPHeaderType::Core,
                BITMAPINFOHEADER_SIZE => BMPHeaderType::Info,
                BITMAPV2HEADER_SIZE => BMPHeaderType::V2,
                BITMAPV3HEADER_SIZE => BMPHeaderType::V3,
                BITMAPV4HEADER_SIZE => BMPHeaderType::V4,
                BITMAPV5HEADER_SIZE => BMPHeaderType::V5,
                _ if bmp_header_size < BITMAPCOREHEADER_SIZE => {
                    // Size of any valid header types won't be smaller than core header type.
                    return Err(ImageError::Decoding(DecodingError::with_message(
                        ImageFormat::Bmp.into(),
                        "Bitmap header is too small".to_owned(),
                    )));
                }
                _ => {
                    return Err(ImageError::Unsupported(
                        UnsupportedError::from_format_and_kind(
                            ImageFormat::Bmp.into(),
                            UnsupportedErrorKind::GenericFeature(format!(
                                "Unknown bitmap header type (size={})",
                                bmp_header_size
                            )),
                        ),
                    ))
                }
            };
            match self.bmp_header_type {
                BMPHeaderType::Core => {
                    self.read_bitmap_core_header()?;
                }
                BMPHeaderType::Info
                | BMPHeaderType::V2
                | BMPHeaderType::V3
                | BMPHeaderType::V4
                | BMPHeaderType::V5 => {
                    self.read_bitmap_info_header()?;
                }
            };
            match self.image_type {
                ImageType::Bitfields16 | ImageType::Bitfields32 => self.read_bitmasks()?,
                _ => {}
            };
            // Skip past any header fields this decoder does not parse.
            self.reader.seek(SeekFrom::Start(bmp_header_end))?;
            match self.image_type {
                ImageType::Palette | ImageType::RLE4 | ImageType::RLE8 => self.read_palette()?,
                _ => {}
            };
            if self.no_file_header {
                // Use the offset of the end of metadata instead of reading a BMP file header.
                self.data_offset = self.reader.seek(SeekFrom::Current(0))?;
            }
            self.has_loaded_metadata = true;
        }
        Ok(())
    }
#[cfg(feature = "ico")]
#[doc(hidden)]
pub fn read_metadata_in_ico_format(&mut self) -> ImageResult<()> {
self.no_file_header = true;
self.add_alpha_channel = true;
self.read_metadata()?;
// The height field in an ICO file is doubled to account for the AND mask
// (whether or not an AND mask is actually present).
self.height /= 2;
Ok(())
}
    /// Number of palette entries: the header's `colors_used` value, or the
    /// full `2^bit_count` when the header leaves it at zero.
    fn get_palette_size(&mut self) -> ImageResult<usize> {
        match self.colors_used {
            0 => Ok(1 << self.bit_count),
            _ => {
                // A corrupt header can claim more colors than the bit depth
                // can ever index.
                if self.colors_used > 1 << self.bit_count {
                    return Err(ImageError::Decoding(DecodingError::with_message(
                        ImageFormat::Bmp.into(),
                        format!(
                            "Palette size {} exceeds maximum size for BMP with bit count of {}",
                            self.colors_used, self.bit_count
                        ),
                    )));
                }
                Ok(self.colors_used as usize)
            }
        }
    }
fn bytes_per_color(&self) -> usize {
match self.bmp_header_type {
BMPHeaderType::Core => 3,
_ => 4,
}
}
    /// Reads the color table into `self.palette`, always materializing 256
    /// entries so 8-bit index lookups can never go out of bounds.
    fn read_palette(&mut self) -> ImageResult<()> {
        const MAX_PALETTE_SIZE: usize = 256; // Palette indices are u8.
        let bytes_per_color = self.bytes_per_color();
        let palette_size = self.get_palette_size()?;
        let max_length = MAX_PALETTE_SIZE * bytes_per_color;
        let length = palette_size * bytes_per_color;
        let mut buf = Vec::with_capacity(max_length);
        // Resize and read the palette entries to the buffer.
        // We limit the buffer to at most 256 colours to avoid any oom issues as
        // 8-bit images can't reference more than 256 indexes anyhow.
        buf.resize(cmp::min(length, max_length), 0);
        self.reader.by_ref().read_exact(&mut buf)?;
        // Allocate 256 entries even if palette_size is smaller, to prevent corrupt files from
        // causing an out-of-bounds array access.
        match length.cmp(&max_length) {
            Ordering::Greater => {
                self.reader
                    .seek(SeekFrom::Current((length - max_length) as i64))?;
            }
            Ordering::Less => buf.resize(max_length, 0),
            Ordering::Equal => (),
        }
        // Entries are stored blue, green, red (plus a padding byte for
        // non-core headers); convert to (r, g, b) tuples.
        let p: Vec<(u8, u8, u8)> = (0..MAX_PALETTE_SIZE)
            .map(|i| {
                let b = buf[bytes_per_color * i];
                let g = buf[bytes_per_color * i + 1];
                let r = buf[bytes_per_color * i + 2];
                (r, g, b)
            })
            .collect();
        self.palette = Some(p);
        Ok(())
    }
fn num_channels(&self) -> usize {
if self.add_alpha_channel {
4
} else {
3
}
}
    /// Create a buffer to hold the decoded pixel data.
    ///
    /// The buffer will be large enough to hold the whole image if it requires less than
    /// `MAX_INITIAL_PIXELS` times the number of channels bytes (adjusted to line up with the
    /// width of a row).
    fn create_pixel_data(&self) -> Vec<u8> {
        let row_width = self.num_channels() * self.width as usize;
        let max_pixels = self.num_channels() * MAX_INITIAL_PIXELS;
        // Make sure the maximum size is whole number of rows.
        let max_starting_size = max_pixels + row_width - (max_pixels % row_width);
        // The buffer has its bytes initially set to 0xFF as the ICO decoder relies on it.
        // If the image turns out larger, `extend_buffer`/`with_rows` grow it later.
        vec![0xFF; cmp::min(row_width * self.height as usize, max_starting_size)]
    }
    /// Returns an iterator over the output rows in *file* order, so callers
    /// can always write rows in the order they appear in the stream.
    fn rows<'a>(&self, pixel_data: &'a mut [u8]) -> RowIterator<'a> {
        let stride = self.width as usize * self.num_channels();
        if self.top_down {
            RowIterator {
                chunks: Chunker::FromTop(pixel_data.chunks_mut(stride)),
            }
        } else {
            // Bottom-up storage: iterate the buffer's rows in reverse.
            RowIterator {
                chunks: Chunker::FromBottom(pixel_data.chunks_mut(stride).rev()),
            }
        }
    }
    /// Decodes 1/2/4/8-bit palette-indexed pixel data into RGB(A).
    fn read_palettized_pixel_data(&mut self) -> ImageResult<Vec<u8>> {
        let mut pixel_data = self.create_pixel_data();
        let num_channels = self.num_channels();
        // Rows of index data are padded to a 32-bit boundary in the file.
        let row_byte_length = ((i32::from(self.bit_count) * self.width + 31) / 32 * 4) as usize;
        let mut indices = vec![0; row_byte_length];
        let palette = self.palette.as_ref().unwrap();
        let bit_count = self.bit_count;
        let reader = &mut self.reader;
        let width = self.width as usize;
        reader.seek(SeekFrom::Start(self.data_offset))?;
        with_rows(
            &mut pixel_data,
            self.width,
            self.height,
            num_channels,
            self.top_down,
            |row| {
                reader.read_exact(&mut indices)?;
                let mut pixel_iter = row.chunks_mut(num_channels);
                match bit_count {
                    1 => {
                        set_1bit_pixel_run(&mut pixel_iter, palette, indices.iter());
                    }
                    2 => {
                        set_2bit_pixel_run(&mut pixel_iter, palette, indices.iter(), width);
                    }
                    4 => {
                        set_4bit_pixel_run(&mut pixel_iter, palette, indices.iter(), width);
                    }
                    8 => {
                        set_8bit_pixel_run(&mut pixel_iter, palette, indices.iter(), width);
                    }
                    // Header parsing only admits the bit counts above.
                    _ => panic!(),
                };
                Ok(())
            },
        )?;
        Ok(pixel_data)
    }
    /// Decode 16-bit pixel data, splitting each little-endian `u16` into
    /// channels with the given bitfield masks.
    ///
    /// `bitfields` overrides the masks stored on `self`; when `None`, the masks
    /// parsed from the header (`self.bitfields`) are used.
    fn read_16_bit_pixel_data(&mut self, bitfields: Option<&Bitfields>) -> ImageResult<Vec<u8>> {
        let mut pixel_data = self.create_pixel_data();
        let num_channels = self.num_channels();
        // Rows of 16-bit pixels are padded to a 4-byte boundary, so odd widths
        // carry 2 trailing padding bytes per row.
        let row_padding_len = self.width as usize % 2 * 2;
        let row_padding = &mut [0; 2][..row_padding_len];
        let bitfields = match bitfields {
            Some(b) => b,
            None => self.bitfields.as_ref().unwrap(),
        };
        let reader = &mut self.reader;
        reader.seek(SeekFrom::Start(self.data_offset))?;
        with_rows(
            &mut pixel_data,
            self.width,
            self.height,
            num_channels,
            self.top_down,
            |row| {
                for pixel in row.chunks_mut(num_channels) {
                    let data = u32::from(reader.read_u16::<LittleEndian>()?);
                    pixel[0] = bitfields.r.read(data);
                    pixel[1] = bitfields.g.read(data);
                    pixel[2] = bitfields.b.read(data);
                    if num_channels == 4 {
                        pixel[3] = bitfields.a.read(data);
                    }
                }
                // Skip the per-row padding (empty slice when width is even).
                reader.read_exact(row_padding)
            },
        )?;
        Ok(pixel_data)
    }
    /// Read image data from a reader in 32-bit formats that use bitfields.
    ///
    /// Each little-endian `u32` is split into channels with the masks parsed
    /// from the header; no row padding is needed since 32-bit pixels are
    /// already 4-byte aligned.
    fn read_32_bit_pixel_data(&mut self) -> ImageResult<Vec<u8>> {
        let mut pixel_data = self.create_pixel_data();
        let num_channels = self.num_channels();
        // NOTE(review): unwrap assumes bitfields were validated before
        // dispatching here (see `read_image_data`).
        let bitfields = self.bitfields.as_ref().unwrap();
        let reader = &mut self.reader;
        reader.seek(SeekFrom::Start(self.data_offset))?;
        with_rows(
            &mut pixel_data,
            self.width,
            self.height,
            num_channels,
            self.top_down,
            |row| {
                for pixel in row.chunks_mut(num_channels) {
                    let data = reader.read_u32::<LittleEndian>()?;
                    pixel[0] = bitfields.r.read(data);
                    pixel[1] = bitfields.g.read(data);
                    pixel[2] = bitfields.b.read(data);
                    if num_channels == 4 {
                        pixel[3] = bitfields.a.read(data);
                    }
                }
                Ok(())
            },
        )?;
        Ok(pixel_data)
    }
    /// Read image data from a reader where the colours are stored as 8-bit values (24 or 32-bit).
    ///
    /// Handles the unused filler byte for the 32-bit variants (before the
    /// colour bytes for `Format888`, after them for `RGB32`) and the alpha
    /// byte for `RGBA32`. Only 24-bit rows need padding to a 4-byte boundary.
    fn read_full_byte_pixel_data(&mut self, format: &FormatFullBytes) -> ImageResult<Vec<u8>> {
        let mut pixel_data = self.create_pixel_data();
        let num_channels = self.num_channels();
        let row_padding_len = match *format {
            FormatFullBytes::RGB24 => (4 - (self.width as usize * 3) % 4) % 4,
            _ => 0,
        };
        let row_padding = &mut [0; 4][..row_padding_len];
        self.reader.seek(SeekFrom::Start(self.data_offset))?;
        let reader = &mut self.reader;
        with_rows(
            &mut pixel_data,
            self.width,
            self.height,
            num_channels,
            self.top_down,
            |row| {
                for pixel in row.chunks_mut(num_channels) {
                    // Format888 stores its filler byte before the colour bytes.
                    if *format == FormatFullBytes::Format888 {
                        reader.read_u8()?;
                    }
                    // Read the colour values (b, g, r).
                    // Reading 3 bytes and reversing them is significantly faster than reading one
                    // at a time.
                    reader.read_exact(&mut pixel[0..3])?;
                    pixel[0..3].reverse();
                    // RGB32 stores its unused byte after the colour bytes.
                    if *format == FormatFullBytes::RGB32 {
                        reader.read_u8()?;
                    }
                    // Read the alpha channel if present
                    if *format == FormatFullBytes::RGBA32 {
                        reader.read_exact(&mut pixel[3..4])?;
                    }
                }
                reader.read_exact(row_padding)
            },
        )?;
        Ok(pixel_data)
    }
    /// Decode run-length-encoded data (RLE4/RLE8).
    ///
    /// Starts with a capped buffer from `create_pixel_data` and only grows it
    /// to the full image size if decoding actually produced data and did not
    /// hit an end-of-file marker, limiting the damage a forged header can do.
    fn read_rle_data(&mut self, image_type: ImageType) -> ImageResult<Vec<u8>> {
        // Seek to the start of the actual image data.
        self.reader.seek(SeekFrom::Start(self.data_offset))?;
        let full_image_size =
            num_bytes(self.width, self.height, self.num_channels()).ok_or_else(|| {
                ImageError::Decoding(DecodingError::with_message(
                    ImageFormat::Bmp.into(),
                    "Image buffer would be too large!".to_owned(),
                ))
            })?;
        let mut pixel_data = self.create_pixel_data();
        let (skip_pixels, skip_rows, eof_hit) =
            self.read_rle_data_step(&mut pixel_data, image_type, 0, 0)?;
        // Extend the buffer if there is still data left.
        // If eof_hit is true, it means that we hit an end-of-file marker in the last step and
        // we won't extend the buffer further to avoid small files with a large specified size causing memory issues.
        // This is only a rudimentary check, a file could still create a large buffer, but the
        // file would now have to at least have some data in it.
        if pixel_data.len() < full_image_size && !eof_hit {
            let new = extend_buffer(&mut pixel_data, full_image_size, true);
            // Resume decoding into the newly-added region, carrying over any
            // delta skip that straddled the buffer boundary.
            self.read_rle_data_step(new, image_type, skip_pixels, skip_rows)?;
        }
        Ok(pixel_data)
    }
    /// Decode RLE instructions into `pixel_data` until the buffer is full, an
    /// end-of-file marker is hit, or the input runs out.
    ///
    /// `skip_pixels`/`skip_rows` carry over a delta instruction that straddled
    /// a previous (smaller) buffer; the matching values for the *next* step are
    /// returned along with whether an EOF marker was seen.
    fn read_rle_data_step(
        &mut self,
        mut pixel_data: &mut [u8],
        image_type: ImageType,
        skip_pixels: u8,
        skip_rows: u8,
    ) -> ImageResult<(u8, u8, bool)> {
        let num_channels = self.num_channels();
        let mut delta_rows_left = 0;
        let mut delta_pixels_left = skip_pixels;
        let mut eof_hit = false;
        // Scope the borrowing of pixel_data by the row iterator.
        {
            // Handling deltas in the RLE scheme means that we need to manually
            // iterate through rows and pixels. Even if we didn't have to handle
            // deltas, we have to ensure that a single runlength doesn't straddle
            // two rows.
            let mut row_iter = self.rows(&mut pixel_data);
            // If we have previously hit a delta value,
            // blank the rows that are to be skipped.
            blank_bytes((&mut row_iter).take(skip_rows.into()));
            let mut insns_iter = RLEInsnIterator {
                r: &mut self.reader,
                image_type,
            };
            // NOTE(review): unwrap assumes the palette exists for RLE images —
            // confirm header parsing guarantees this.
            let p = self.palette.as_ref().unwrap();
            'row_loop: while let Some(row) = row_iter.next() {
                let mut pixel_iter = row.chunks_mut(num_channels);
                // Blank delta skipped pixels if any.
                blank_bytes((&mut pixel_iter).take(delta_pixels_left.into()));
                delta_pixels_left = 0;
                'rle_loop: loop {
                    if let Some(insn) = insns_iter.next() {
                        match insn {
                            RLEInsn::EndOfFile => {
                                // Zero everything that was never written, then stop.
                                blank_bytes(pixel_iter);
                                blank_bytes(row_iter);
                                eof_hit = true;
                                break 'row_loop;
                            }
                            RLEInsn::EndOfRow => {
                                blank_bytes(pixel_iter);
                                break 'rle_loop;
                            }
                            RLEInsn::Delta(x_delta, y_delta) => {
                                if y_delta > 0 {
                                    for n in 1..y_delta {
                                        if let Some(row) = row_iter.next() {
                                            // The msdn site on bitmap compression doesn't specify
                                            // what happens to the values skipped when encountering
                                            // a delta code, however IE and the windows image
                                            // preview seems to replace them with black pixels,
                                            // so we stick to that.
                                            for b in row {
                                                *b = 0;
                                            }
                                        } else {
                                            delta_pixels_left = x_delta;
                                            // We've reached the end of the buffer.
                                            delta_rows_left = y_delta - n;
                                            break 'row_loop;
                                        }
                                    }
                                }
                                for _ in 0..x_delta {
                                    if let Some(pixel) = pixel_iter.next() {
                                        for b in pixel {
                                            *b = 0;
                                        }
                                    } else {
                                        // We can't go any further in this row.
                                        break;
                                    }
                                }
                            }
                            RLEInsn::Absolute(length, indices) => {
                                // Absolute mode cannot span rows, so if we run
                                // out of pixels to process, we should stop
                                // processing the image.
                                match image_type {
                                    ImageType::RLE8 => {
                                        if !set_8bit_pixel_run(
                                            &mut pixel_iter,
                                            p,
                                            indices.iter(),
                                            length as usize,
                                        ) {
                                            break 'row_loop;
                                        }
                                    }
                                    ImageType::RLE4 => {
                                        if !set_4bit_pixel_run(
                                            &mut pixel_iter,
                                            p,
                                            indices.iter(),
                                            length as usize,
                                        ) {
                                            break 'row_loop;
                                        }
                                    }
                                    // Only RLE4/RLE8 ever reach this function.
                                    _ => panic!(),
                                }
                            }
                            RLEInsn::PixelRun(n_pixels, palette_index) => {
                                // A pixel run isn't allowed to span rows, but we
                                // simply continue on to the next row if we run
                                // out of pixels to set.
                                match image_type {
                                    ImageType::RLE8 => {
                                        if !set_8bit_pixel_run(
                                            &mut pixel_iter,
                                            p,
                                            repeat(&palette_index),
                                            n_pixels as usize,
                                        ) {
                                            break 'rle_loop;
                                        }
                                    }
                                    ImageType::RLE4 => {
                                        if !set_4bit_pixel_run(
                                            &mut pixel_iter,
                                            p,
                                            repeat(&palette_index),
                                            n_pixels as usize,
                                        ) {
                                            break 'rle_loop;
                                        }
                                    }
                                    _ => panic!(),
                                }
                            }
                        }
                    } else {
                        // We ran out of data while we still had rows to fill in.
                        return Err(ImageError::Decoding(DecodingError::with_message(
                            ImageFormat::Bmp.into(),
                            "Not enough RLE data".to_owned(),
                        )));
                    }
                }
            }
        }
        Ok((delta_pixels_left, delta_rows_left, eof_hit))
    }
    /// Read the actual data of the image. This function is deliberately not public because it
    /// cannot be called multiple times without seeking back the underlying reader in between.
    ///
    /// `buf` must be exactly the decoded image size: `copy_from_slice` panics
    /// on a length mismatch (the `read_image` caller asserts this).
    pub(crate) fn read_image_data(&mut self, buf: &mut [u8]) -> ImageResult<()> {
        // Dispatch on the pixel format determined while parsing the header.
        let data = match self.image_type {
            ImageType::Palette => self.read_palettized_pixel_data(),
            ImageType::RGB16 => self.read_16_bit_pixel_data(Some(&R5_G5_B5_COLOR_MASK)),
            ImageType::RGB24 => self.read_full_byte_pixel_data(&FormatFullBytes::RGB24),
            ImageType::RGB32 => self.read_full_byte_pixel_data(&FormatFullBytes::RGB32),
            ImageType::RGBA32 => self.read_full_byte_pixel_data(&FormatFullBytes::RGBA32),
            ImageType::RLE8 => self.read_rle_data(ImageType::RLE8),
            ImageType::RLE4 => self.read_rle_data(ImageType::RLE4),
            ImageType::Bitfields16 => match self.bitfields {
                Some(_) => self.read_16_bit_pixel_data(None),
                None => Err(ImageError::Decoding(DecodingError::with_message(
                    ImageFormat::Bmp.into(),
                    "Missing 16-bit bitfield masks".to_owned(),
                ))),
            },
            ImageType::Bitfields32 => match self.bitfields {
                // A plain 8/8/8 mask is just byte-aligned colour data, so take
                // the faster full-byte path instead of bitfield extraction.
                Some(R8_G8_B8_COLOR_MASK) => {
                    self.read_full_byte_pixel_data(&FormatFullBytes::Format888)
                }
                Some(_) => self.read_32_bit_pixel_data(),
                None => Err(ImageError::Decoding(DecodingError::with_message(
                    ImageFormat::Bmp.into(),
                    "Missing 32-bit bitfield masks".to_owned(),
                ))),
            },
        }?;
        buf.copy_from_slice(&data);
        Ok(())
    }
}
/// Wrapper struct around a `Cursor<Vec<u8>>`
///
/// `PhantomData<R>` keeps the decoder's reader type parameter in the type
/// without storing an actual reader.
pub struct BmpReader<R>(Cursor<Vec<u8>>, PhantomData<R>);
impl<R> Read for BmpReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.0.read(buf)
    }
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        // Fast path: if nothing has been consumed yet and the caller's buffer
        // is empty, hand over the whole backing Vec with a swap instead of
        // copying the data.
        if self.0.position() == 0 && buf.is_empty() {
            mem::swap(buf, self.0.get_mut());
            Ok(buf.len())
        } else {
            self.0.read_to_end(buf)
        }
    }
}
impl<'a, R: 'a + Read + Seek> ImageDecoder<'a> for BmpDecoder<R> {
    type Reader = BmpReader<R>;
    fn dimensions(&self) -> (u32, u32) {
        // width/height are stored signed; presumably normalized to
        // non-negative during header parsing — TODO confirm.
        (self.width as u32, self.height as u32)
    }
    fn color_type(&self) -> ColorType {
        if self.add_alpha_channel {
            ColorType::Rgba8
        } else {
            ColorType::Rgb8
        }
    }
    fn into_reader(self) -> ImageResult<Self::Reader> {
        // Decode the whole image eagerly and serve it from an in-memory cursor.
        Ok(BmpReader(Cursor::new(image::decoder_to_vec(self)?), PhantomData))
    }
    fn read_image(mut self, buf: &mut [u8]) -> ImageResult<()> {
        assert_eq!(u64::try_from(buf.len()), Ok(self.total_bytes()));
        self.read_image_data(buf)
    }
}
impl<'a, R: 'a + Read + Seek> ImageDecoderExt<'a> for BmpDecoder<R> {
    /// Decode a sub-rectangle of the image into `buf`.
    ///
    /// The full image is decoded internally via `read_image_data` and
    /// `load_rect` crops out the requested region; the reader position is
    /// restored afterwards so the decoder can be reused.
    fn read_rect_with_progress<F: Fn(Progress)>(
        &mut self,
        x: u32,
        y: u32,
        width: u32,
        height: u32,
        buf: &mut [u8],
        progress_callback: F,
    ) -> ImageResult<()> {
        // Remember the current offset so we can rewind when done
        // (stream_position is the idiomatic form of seek(SeekFrom::Current(0))).
        let start = self.reader.stream_position()?;
        image::load_rect(x, y, width, height, buf, progress_callback, self, |_, _| unreachable!(),
                         |s, buf| { s.read_image_data(buf).map(|_| buf.len()) })?;
        self.reader.seek(SeekFrom::Start(start))?;
        Ok(())
    }
}
#[cfg(test)]
mod test {
    use super::Bitfield;

    /// For every supported field width, `Bitfield::read` must scale the raw
    /// value to the full 0..=255 range with round-to-nearest semantics.
    #[test]
    fn test_bitfield_len() {
        for len in 1..9 {
            let bitfield = Bitfield { shift: 0, len };
            for i in 0..(1 << len) {
                let read = bitfield.read(i);
                let expected = (i as f64 / ((1 << len) - 1) as f64 * 255f64).round() as u8;
                // Put the diagnostics in the assert message instead of a
                // separate println! so they appear only on failure output.
                assert_eq!(
                    read, expected,
                    "len:{} i:{} read:{} expected:{}",
                    len, i, read, expected
                );
            }
        }
    }
}
| 36.062543 | 117 | 0.49332 |
d69b60fb33c6f2ca0d74a02dc9091f59294b8743 | 43,892 | // Copyright (c) SimpleStaking, Viable Systems and Tezedge Contributors
// SPDX-License-Identifier: MIT
//! Sends blocks to the `protocol_runner`.
//! This actor is responsible for correct applying of blocks with Tezos protocol in context
//! This actor is aslo responsible for correct initialization of genesis in storage.
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{channel, Receiver as QueueReceiver, Sender as QueueSender};
use std::sync::{Arc, Mutex};
use std::thread;
use std::thread::JoinHandle;
use std::time::{Duration, Instant};
use failure::{format_err, Error, Fail};
use riker::actors::*;
use slog::{debug, info, trace, warn, Logger};
use crypto::hash::{BlockHash, ChainId};
use storage::chain_meta_storage::ChainMetaStorageReader;
use storage::{
block_meta_storage, BlockAdditionalData, BlockHeaderWithHash, BlockMetaStorageReader,
PersistentStorage,
};
use storage::{
initialize_storage_with_genesis_block, store_applied_block_result, store_commit_genesis_result,
BlockMetaStorage, BlockStorage, BlockStorageReader, ChainMetaStorage, OperationsMetaStorage,
OperationsStorage, OperationsStorageReader, StorageError, StorageInitInfo,
};
use tezos_api::environment::TezosEnvironmentConfiguration;
use tezos_api::ffi::ApplyBlockRequest;
use tezos_wrapper::service::{
handle_protocol_service_error, ProtocolController, ProtocolServiceError,
};
use tezos_wrapper::TezosApiConnectionPool;
use crate::chain_current_head_manager::{ChainCurrentHeadManagerRef, ProcessValidatedBlock};
use crate::peer_branch_bootstrapper::{
ApplyBlockBatchDone, ApplyBlockBatchFailed, PeerBranchBootstrapperRef,
};
use crate::shell_channel::{InjectBlockOneshotResultCallback, ShellChannelMsg, ShellChannelRef};
use crate::state::{ApplyBlockBatch, StateError};
use crate::stats::apply_block_stats::{ApplyBlockStats, BlockValidationTimer};
use crate::subscription::subscribe_to_shell_shutdown;
use crate::utils::dispatch_oneshot_result;
use std::collections::VecDeque;
use tokio::sync::{OwnedSemaphorePermit, Semaphore};
/// Join handle of the block-applier thread, shared so `post_stop` can take and join it.
type SharedJoinHandle = Arc<Mutex<Option<JoinHandle<Result<(), Error>>>>>;
/// How often to print stats in logs
const LOG_INTERVAL: Duration = Duration::from_secs(60);
/// Blocks are applied in batches to avoid unnecessary database access between two
/// consecutive blocks (predecessor data). We also don't want to fill up the queue,
/// to keep the possibility of injecting blocks from RPC by a direct ApplyBlock message.
const BLOCK_APPLY_BATCH_MAX_TICKETS: usize = 2;
/// Permit that limits how many apply-block batches may be in flight at once.
pub type ApplyBlockPermit = OwnedSemaphorePermit;
/// Message commands [`ChainFeeder`] to apply completed block.
#[derive(Clone, Debug)]
pub struct ApplyBlock {
    // Ordered batch of block hashes to apply.
    batch: ApplyBlockBatch,
    chain_id: Arc<ChainId>,
    // Actor to notify about batch success/failure (if bootstrapping).
    bootstrapper: Option<PeerBranchBootstrapperRef>,
    /// Callback can be used to wait for apply block result
    result_callback: Option<InjectBlockOneshotResultCallback>,
    /// Simple lock guard, for easy synchronization
    permit: Option<Arc<ApplyBlockPermit>>,
}
impl ApplyBlock {
    pub fn new(
        chain_id: Arc<ChainId>,
        batch: ApplyBlockBatch,
        result_callback: Option<InjectBlockOneshotResultCallback>,
        bootstrapper: Option<PeerBranchBootstrapperRef>,
        permit: Option<ApplyBlockPermit>,
    ) -> Self {
        Self {
            chain_id,
            batch,
            result_callback,
            bootstrapper,
            // Wrapped in Arc so the message stays Clone.
            permit: permit.map(Arc::new),
        }
    }
}
/// Message commands [`ChainFeeder`] to add to the queue for scheduling
#[derive(Clone, Debug)]
pub struct ScheduleApplyBlock {
    batch: ApplyBlockBatch,
    chain_id: Arc<ChainId>,
    bootstrapper: Option<PeerBranchBootstrapperRef>,
}
impl ScheduleApplyBlock {
    pub fn new(
        chain_id: Arc<ChainId>,
        batch: ApplyBlockBatch,
        bootstrapper: Option<PeerBranchBootstrapperRef>,
    ) -> Self {
        Self {
            chain_id,
            batch,
            bootstrapper,
        }
    }
}
/// Message commands [`ChainFeeder`] to log its internal stats.
#[derive(Clone, Debug)]
pub struct LogStats;
/// Message tells [`ChainFeeder`] that batch is done, so it can log its internal stats or schedule more batches.
#[derive(Clone, Debug)]
pub struct ApplyBlockDone {
    // Stats collected while applying the finished batch; merged into the actor's totals.
    stats: ApplyBlockStats,
}
/// Internal queue commands
pub(crate) enum Event {
    // Apply a batch; carries the actor ref so the worker thread can report back.
    ApplyBlock(ApplyBlock, ChainFeederRef),
    // Wakes the worker thread so it can observe the stopped flag and exit.
    ShuttingDown,
}
/// Feeds blocks and operations to the tezos protocol (ocaml code).
#[actor(
    ShellChannelMsg,
    ApplyBlock,
    ScheduleApplyBlock,
    LogStats,
    ApplyBlockDone
)]
pub struct ChainFeeder {
    /// Just for subscribing to shell shutdown channel
    shell_channel: ShellChannelRef,
    /// We apply blocks by batches, and this queue is the 'waiting room'.
    /// Batches leave the queue as soon as an apply ticket becomes available.
    queue: VecDeque<ScheduleApplyBlock>,
    /// Semaphore for limiting block apply queue, guarding block_applier_event_sender.
    /// We also want to limit the QueueSender, because there are two points producing
    /// ApplyBlock events (bootstrap, inject block).
    apply_block_tickets: Arc<Semaphore>,
    // Total number of tickets; used to derive how many are currently held.
    apply_block_tickets_maximum: usize,
    /// Internal queue sender
    block_applier_event_sender: Arc<Mutex<QueueSender<Event>>>,
    /// Thread where blocks are applied will run until this is set to `false`
    block_applier_run: Arc<AtomicBool>,
    /// Block applier thread
    block_applier_thread: SharedJoinHandle,
    /// Statistics for applying blocks
    apply_block_stats: ApplyBlockStats,
}
/// Reference to [chain feeder](ChainFeeder) actor
pub type ChainFeederRef = ActorRef<ChainFeederMsg>;
impl ChainFeeder {
    /// Create new actor instance.
    ///
    /// If the actor is successfully created then reference to the actor is returned.
    /// Commands to the tezos protocol are transmitted via IPC channel provided by [`ipc_server`](IpcCmdServer).
    ///
    /// This actor spawns a new thread in which it will periodically monitor [`persistent_storage`](PersistentStorage).
    /// Purpose of the monitoring thread is to detect whether it is possible to apply blocks received by the p2p layer.
    /// If the block can be applied, it is sent via IPC to the `protocol_runner`, where it is then applied by calling a tezos ffi.
    pub fn actor(
        sys: &impl ActorRefFactory,
        chain_current_head_manager: ChainCurrentHeadManagerRef,
        shell_channel: ShellChannelRef,
        persistent_storage: PersistentStorage,
        tezos_writeable_api: Arc<TezosApiConnectionPool>,
        init_storage_data: StorageInitInfo,
        tezos_env: TezosEnvironmentConfiguration,
        log: Logger,
    ) -> Result<ChainFeederRef, CreateError> {
        // spawn inner thread
        let (block_applier_event_sender, block_applier_run, block_applier_thread) =
            BlockApplierThreadSpawner::new(
                chain_current_head_manager,
                persistent_storage,
                Arc::new(init_storage_data),
                Arc::new(tezos_env),
                tezos_writeable_api,
                log,
            )
            .spawn_feeder_thread(format!("{}-block-applier-thread", ChainFeeder::name()))
            .map_err(|_| CreateError::Panicked)?;
        sys.actor_of_props::<ChainFeeder>(
            ChainFeeder::name(),
            Props::new_args((
                shell_channel,
                Arc::new(Mutex::new(block_applier_event_sender)),
                block_applier_run,
                Arc::new(Mutex::new(Some(block_applier_thread))),
                BLOCK_APPLY_BATCH_MAX_TICKETS,
            )),
        )
    }
    /// The `ChainFeeder` is intended to serve as a singleton actor so that's why
    /// we won't support multiple names per instance.
    fn name() -> &'static str {
        "chain-feeder"
    }
    /// Push an event to the applier thread's queue.
    fn send_to_queue(&self, event: Event) -> Result<(), Error> {
        self.block_applier_event_sender
            .lock()
            .map_err(|e| format_err!("Failed to lock queue, reason: {}", e))?
            .send(event)
            .map_err(|e| format_err!("Failed to send to queue, reason: {}", e))
    }
    /// Forward an apply request to the worker thread; on failure, report the
    /// error to the injection callback and ping the actor so batch scheduling
    /// does not stall.
    fn apply_completed_block(&self, msg: ApplyBlock, chain_feeder: ChainFeederRef, log: &Logger) {
        // add request to queue
        let result_callback = msg.result_callback.clone();
        if let Err(e) = self.send_to_queue(Event::ApplyBlock(msg, chain_feeder.clone())) {
            warn!(log, "Failed to send `apply block request` to queue"; "reason" => format!("{}", e));
            if let Err(de) = dispatch_oneshot_result(result_callback, || {
                Err(StateError::ProcessingError {
                    reason: format!("{}", e),
                })
            }) {
                warn!(log, "Failed to dispatch result"; "reason" => format!("{}", de));
            }
            // just ping chain_feeder
            chain_feeder.tell(
                ApplyBlockDone {
                    stats: ApplyBlockStats::default(),
                },
                None,
            );
        }
    }
    /// Enqueue a batch to the 'waiting room'.
    fn add_to_batch_queue(&mut self, msg: ScheduleApplyBlock) {
        self.queue.push_back(msg);
    }
    /// Drain the waiting queue into the applier thread, one batch per
    /// available semaphore permit; the permit travels with the batch and is
    /// released when the batch finishes.
    fn process_batch_queue(&mut self, chain_feeder: ChainFeederRef, log: &Logger) {
        // try schedule batches as many permits we can get
        while let Ok(permit) = self.apply_block_tickets.clone().try_acquire_owned() {
            match self.queue.pop_front() {
                Some(batch) => {
                    self.apply_completed_block(
                        ApplyBlock::new(
                            batch.chain_id,
                            batch.batch,
                            None,
                            batch.bootstrapper,
                            Some(permit),
                        ),
                        chain_feeder.clone(),
                        log,
                    );
                }
                None => break,
            }
        }
    }
    /// Merge a finished batch's stats into the running totals.
    fn update_stats(&mut self, new_stats: ApplyBlockStats) {
        self.apply_block_stats.merge(new_stats);
    }
}
impl
    ActorFactoryArgs<(
        ShellChannelRef,
        Arc<Mutex<QueueSender<Event>>>,
        Arc<AtomicBool>,
        SharedJoinHandle,
        usize,
    )> for ChainFeeder
{
    /// Build the actor state from the pieces prepared in `ChainFeeder::actor`
    /// (the applier thread is already running at this point).
    fn create_args(
        (
            shell_channel,
            block_applier_event_sender,
            block_applier_run,
            block_applier_thread,
            max_permits,
        ): (
            ShellChannelRef,
            Arc<Mutex<QueueSender<Event>>>,
            Arc<AtomicBool>,
            SharedJoinHandle,
            usize,
        ),
    ) -> Self {
        ChainFeeder {
            shell_channel,
            queue: VecDeque::new(),
            block_applier_event_sender,
            block_applier_run,
            block_applier_thread,
            apply_block_stats: ApplyBlockStats::default(),
            apply_block_tickets: Arc::new(Semaphore::new(max_permits)),
            apply_block_tickets_maximum: max_permits,
        }
    }
}
impl Actor for ChainFeeder {
    type Msg = ChainFeederMsg;
    fn pre_start(&mut self, ctx: &Context<Self::Msg>) {
        subscribe_to_shell_shutdown(&self.shell_channel, ctx.myself());
        // Periodically log apply statistics (first tick after half an interval).
        ctx.schedule::<Self::Msg, _>(
            LOG_INTERVAL / 2,
            LOG_INTERVAL,
            ctx.myself(),
            None,
            LogStats.into(),
        );
    }
    fn post_stop(&mut self) {
        // Set the flag, and let the thread wake up. There is no race condition here, if `unpark`
        // happens first, `park` will return immediately. Hence there is no risk of a deadlock.
        self.block_applier_run.store(false, Ordering::Release);
        let join_handle = self
            .block_applier_thread
            .lock()
            .unwrap()
            .take()
            .expect("Thread join handle is missing");
        join_handle.thread().unpark();
        let _ = join_handle
            .join()
            .expect("Failed to join block applier thread");
    }
    fn recv(&mut self, ctx: &Context<Self::Msg>, msg: Self::Msg, sender: Sender) {
        self.receive(ctx, msg, sender);
    }
}
impl Receive<ApplyBlock> for ChainFeeder {
    type Msg = ChainFeederMsg;
    /// Hand the apply request straight to the worker thread, unless a shutdown
    /// has already been requested (in which case the message is dropped).
    fn receive(&mut self, ctx: &Context<Self::Msg>, msg: ApplyBlock, _: Sender) {
        if self.block_applier_run.load(Ordering::Acquire) {
            self.apply_completed_block(msg, ctx.myself(), &ctx.system.log());
        }
    }
}
impl Receive<ScheduleApplyBlock> for ChainFeeder {
    type Msg = ChainFeederMsg;
    /// Park the batch in the waiting queue and immediately try to schedule as
    /// much queued work as the available tickets allow. Dropped on shutdown.
    fn receive(&mut self, ctx: &Context<Self::Msg>, msg: ScheduleApplyBlock, _: Sender) {
        if self.block_applier_run.load(Ordering::Acquire) {
            self.add_to_batch_queue(msg);
            self.process_batch_queue(ctx.myself(), &ctx.system.log());
        }
    }
}
impl Receive<ApplyBlockDone> for ChainFeeder {
    type Msg = ChainFeederMsg;
    /// Fold the finished batch's statistics into the running totals and try to
    /// schedule more queued batches. Dropped on shutdown.
    fn receive(&mut self, ctx: &Context<Self::Msg>, msg: ApplyBlockDone, _: Sender) {
        if self.block_applier_run.load(Ordering::Acquire) {
            self.update_stats(msg.stats);
            self.process_batch_queue(ctx.myself(), &ctx.system.log());
        }
    }
}
impl Receive<LogStats> for ChainFeeder {
    type Msg = ChainFeederMsg;
    /// Periodic stats logging: summarizes blocks applied since the last tick
    /// (then clears those counters) plus the current queue occupancy.
    fn receive(&mut self, ctx: &Context<Self::Msg>, _: LogStats, _: Sender) {
        let log = ctx.system.log();
        // calculate applied stats
        let (last_applied, last_applied_block_level, last_applied_block_elapsed_in_secs) = {
            let applied_block_lasts_count = self.apply_block_stats.applied_block_lasts_count();
            if *applied_block_lasts_count > 0 {
                let validation = self.apply_block_stats.print_formatted_average_times();
                // collect stats before clearing
                let stats = format!(
                    "({} blocks validated in time: {:?}, average times [{}]",
                    applied_block_lasts_count,
                    self.apply_block_stats.sum_validated_at_time(),
                    validation,
                );
                let applied_block_level = *self.apply_block_stats.applied_block_level();
                let applied_block_last = self
                    .apply_block_stats
                    .applied_block_last()
                    .map(|i| i.elapsed().as_secs());
                // clear stats for next run
                self.apply_block_stats.clear_applied_block_lasts();
                (stats, applied_block_level, applied_block_last)
            } else {
                (
                    format!("({} blocks)", applied_block_lasts_count),
                    None,
                    None,
                )
            }
        };
        // count queue batches
        let (waiting_batch_count, waiting_batch_blocks_count) =
            self.queue
                .iter()
                .fold((0, 0), |(batches_count, blocks_count), next_batch| {
                    (
                        batches_count + 1,
                        blocks_count + next_batch.batch.batch_total_size(),
                    )
                });
        // Tickets currently held correspond to batches queued/in flight;
        // saturating_sub is the idiomatic form of checked_sub(..).unwrap_or(0).
        let queued_batch_count = self
            .apply_block_tickets_maximum
            .saturating_sub(self.apply_block_tickets.available_permits());
        info!(log, "Blocks apply info";
            "queued_batch_count" => queued_batch_count,
            "waiting_batch_count" => waiting_batch_count,
            "waiting_batch_blocks_count" => waiting_batch_blocks_count,
            "last_applied" => last_applied,
            "last_applied_batch_block_level" => last_applied_block_level,
            "last_applied_batch_block_elapsed_in_secs" => last_applied_block_elapsed_in_secs);
    }
}
impl Receive<ShellChannelMsg> for ChainFeeder {
    type Msg = ChainFeederMsg;
    /// React to shell shutdown: flip the run flag off, then push a
    /// `ShuttingDown` event so the blocked worker thread wakes up and exits.
    fn receive(&mut self, ctx: &Context<Self::Msg>, msg: ShellChannelMsg, _sender: Sender) {
        match msg {
            ShellChannelMsg::ShuttingDown(_) => {
                self.block_applier_run.store(false, Ordering::Release);
                // This event just pings the inner thread to shutdown
                if let Err(e) = self.send_to_queue(Event::ShuttingDown) {
                    warn!(ctx.system.log(), "Failed to send ShuttinDown event do internal queue"; "reason" => format!("{:?}", e));
                }
            }
            _ => {}
        }
    }
}
/// Possible errors for feeding chain
#[derive(Debug, Fail)]
pub enum FeedChainError {
#[fail(display = "Cannot resolve current head, no genesis was commited")]
UnknownCurrentHeadError,
#[fail(
display = "Context is not stored, context_hash: {}, reason: {}",
context_hash, reason
)]
MissingContextError {
context_hash: String,
reason: String,
},
#[fail(display = "Storage read/write error, reason: {:?}", error)]
StorageError { error: StorageError },
#[fail(display = "Protocol service error error, reason: {:?}", error)]
ProtocolServiceError { error: ProtocolServiceError },
#[fail(display = "Block apply processing error, reason: {:?}", reason)]
ProcessingError { reason: String },
}
impl From<StorageError> for FeedChainError {
fn from(error: StorageError) -> Self {
FeedChainError::StorageError { error }
}
}
impl From<ProtocolServiceError> for FeedChainError {
fn from(error: ProtocolServiceError) -> Self {
FeedChainError::ProtocolServiceError { error }
}
}
/// Bundles everything the block-applier worker thread needs, so it can be
/// cloned into the thread closure in `spawn_feeder_thread`.
#[derive(Clone)]
pub(crate) struct BlockApplierThreadSpawner {
    /// actor for managing current head
    chain_current_head_manager: ChainCurrentHeadManagerRef,
    persistent_storage: PersistentStorage,
    init_storage_data: Arc<StorageInitInfo>,
    tezos_env: Arc<TezosEnvironmentConfiguration>,
    // Pool of writeable protocol-runner connections used for applying blocks.
    tezos_writeable_api: Arc<TezosApiConnectionPool>,
    log: Logger,
}
impl BlockApplierThreadSpawner {
    pub(crate) fn new(
        chain_current_head_manager: ChainCurrentHeadManagerRef,
        persistent_storage: PersistentStorage,
        init_storage_data: Arc<StorageInitInfo>,
        tezos_env: Arc<TezosEnvironmentConfiguration>,
        tezos_writeable_api: Arc<TezosApiConnectionPool>,
        log: Logger,
    ) -> Self {
        Self {
            chain_current_head_manager,
            persistent_storage,
            tezos_writeable_api,
            init_storage_data,
            tezos_env,
            log,
        }
    }
    /// Spawns asynchronous thread, which process events from internal queue
    ///
    /// Returns the queue sender, the shared run flag (set to `false` to stop
    /// the loop), and the thread's join handle.
    fn spawn_feeder_thread(
        &self,
        thread_name: String,
    ) -> Result<
        (
            QueueSender<Event>,
            Arc<AtomicBool>,
            JoinHandle<Result<(), Error>>,
        ),
        failure::Error,
    > {
        // spawn thread which processes event
        let (block_applier_event_sender, mut block_applier_event_receiver) = channel();
        let block_applier_run = Arc::new(AtomicBool::new(false));
        let block_applier_thread = {
            let chain_current_head_manager = self.chain_current_head_manager.clone();
            let persistent_storage = self.persistent_storage.clone();
            let tezos_writeable_api = self.tezos_writeable_api.clone();
            let init_storage_data = self.init_storage_data.clone();
            let tezos_env = self.tezos_env.clone();
            let log = self.log.clone();
            let block_applier_run = block_applier_run.clone();
            thread::Builder::new().name(thread_name).spawn(move || -> Result<(), Error> {
                let block_storage = BlockStorage::new(&persistent_storage);
                let block_meta_storage = BlockMetaStorage::new(&persistent_storage);
                let chain_meta_storage = ChainMetaStorage::new(&persistent_storage);
                let operations_storage = OperationsStorage::new(&persistent_storage);
                let operations_meta_storage = OperationsMetaStorage::new(&persistent_storage);
                block_applier_run.store(true, Ordering::Release);
                info!(log, "Chain feeder started processing");
                // Outer loop re-acquires a protocol-runner connection whenever
                // `feed_chain_to_protocol` returns, until the run flag drops.
                while block_applier_run.load(Ordering::Acquire) {
                    match tezos_writeable_api.pool.get() {
                        Ok(mut protocol_controller) => match feed_chain_to_protocol(
                            &tezos_env,
                            &init_storage_data,
                            &block_applier_run,
                            &chain_current_head_manager,
                            &block_storage,
                            &block_meta_storage,
                            &chain_meta_storage,
                            &operations_storage,
                            &operations_meta_storage,
                            &protocol_controller.api,
                            &mut block_applier_event_receiver,
                            &log,
                        ) {
                            Ok(()) => {
                                protocol_controller.set_release_on_return_to_pool();
                                debug!(log, "Feed chain to protocol finished")
                            }
                            Err(err) => {
                                protocol_controller.set_release_on_return_to_pool();
                                // Only worth warning about while we are still supposed to run.
                                if block_applier_run.load(Ordering::Acquire) {
                                    warn!(log, "Error while feeding chain to protocol"; "reason" => format!("{:?}", err));
                                }
                            }
                        },
                        Err(err) => {
                            warn!(log, "No connection from protocol runner"; "reason" => format!("{:?}", err))
                        }
                    }
                }
                info!(log, "Chain feeder thread finished");
                Ok(())
            })?
        };
        Ok((
            block_applier_event_sender,
            block_applier_run,
            block_applier_thread,
        ))
    }
}
fn feed_chain_to_protocol(
tezos_env: &TezosEnvironmentConfiguration,
init_storage_data: &StorageInitInfo,
apply_block_run: &AtomicBool,
chain_current_head_manager: &ChainCurrentHeadManagerRef,
block_storage: &BlockStorage,
block_meta_storage: &BlockMetaStorage,
chain_meta_storage: &ChainMetaStorage,
operations_storage: &OperationsStorage,
operations_meta_storage: &OperationsMetaStorage,
protocol_controller: &ProtocolController,
block_applier_event_receiver: &mut QueueReceiver<Event>,
log: &Logger,
) -> Result<(), FeedChainError> {
// at first we initialize protocol runtime and ffi context
initialize_protocol_context(
&apply_block_run,
chain_current_head_manager,
block_storage,
block_meta_storage,
chain_meta_storage,
operations_meta_storage,
&protocol_controller,
&log,
&tezos_env,
&init_storage_data,
)?;
// now just check current head (at least genesis should be there)
if chain_meta_storage
.get_current_head(&init_storage_data.chain_id)?
.is_none()
{
// this should not happen here, we applied at least genesis before
return Err(FeedChainError::UnknownCurrentHeadError);
};
// now we can start applying block
while apply_block_run.load(Ordering::Acquire) {
// let's handle event, if any
if let Ok(event) = block_applier_event_receiver.recv() {
match event {
Event::ApplyBlock(request, chain_feeder) => {
// lets apply block batch
let ApplyBlock {
batch,
bootstrapper,
chain_id,
result_callback,
permit,
} = request;
let mut last_applied: Option<Arc<BlockHash>> = None;
let mut batch_stats = Some(ApplyBlockStats::default());
let mut oneshot_result: Option<Result<(), StateError>> = None;
let mut previous_block_data_cache: Option<(
Arc<BlockHeaderWithHash>,
BlockAdditionalData,
)> = None;
// lets apply blocks in order
for block_to_apply in batch.take_all_blocks_to_apply() {
debug!(log, "Applying block";
"block_header_hash" => block_to_apply.to_base58_check(), "chain_id" => chain_id.to_base58_check());
if !apply_block_run.load(Ordering::Acquire) {
info!(log, "Shutdown detected, so stopping block batch apply immediately";
"block_header_hash" => block_to_apply.to_base58_check(), "chain_id" => chain_id.to_base58_check());
return Ok(());
}
let validated_at_timer = Instant::now();
// prepare request and data for block
// collect all required data for apply
let load_metadata_timer = Instant::now();
let apply_block_request_data = prepare_apply_request(
&block_to_apply,
chain_id.as_ref().clone(),
block_storage,
block_meta_storage,
operations_storage,
previous_block_data_cache,
);
let load_metadata_elapsed = load_metadata_timer.elapsed();
// apply block and handle result
match _apply_block(
chain_id.clone(),
block_to_apply.clone(),
apply_block_request_data,
validated_at_timer,
load_metadata_elapsed,
block_storage,
block_meta_storage,
protocol_controller,
init_storage_data,
&log,
) {
Ok(result) => {
match result {
Some((
validated_block,
block_additional_data,
block_validation_timer,
)) => {
last_applied = Some(block_to_apply);
if result_callback.is_some() {
oneshot_result = Some(Ok(()));
}
previous_block_data_cache = Some((
validated_block.block.clone(),
block_additional_data,
));
// update state
if let Some(stats) = batch_stats.as_mut() {
stats.set_applied_block_level(
validated_block.block.header.level(),
);
stats.add_block_validation_stats(
&block_validation_timer,
);
}
// notify chain current head manager (only for new applied block)
chain_current_head_manager.tell(validated_block, None);
}
None => {
last_applied = Some(block_to_apply);
if result_callback.is_some() {
oneshot_result =
Some(Err(StateError::ProcessingError {
reason: "Block/batch is already applied"
.to_string(),
}));
}
previous_block_data_cache = None;
}
}
}
Err(e) => {
warn!(log, "Block apply processing failed"; "block" => block_to_apply.to_base58_check(), "reason" => format!("{}", e));
// handle condvar immediately
if let Err(e) =
dispatch_oneshot_result(result_callback.clone(), || {
Err(StateError::ProcessingError {
reason: format!("{}", e),
})
})
{
warn!(log, "Failed to dispatch result"; "reason" => format!("{}", e));
}
if result_callback.is_some() {
// dont process next time
oneshot_result = None;
}
// notify bootstrapper with failed + last_applied
if apply_block_run.load(Ordering::Acquire) {
if let Some(bootstrapper) = bootstrapper.as_ref() {
bootstrapper.tell(
ApplyBlockBatchFailed {
failed_block: block_to_apply.clone(),
},
None,
);
}
}
// we need to fire stats here (because we can throw error potentialy)
if let Some(stats) = batch_stats.take() {
chain_feeder.tell(ApplyBlockDone { stats }, None);
}
// handle protocol error - continue or restart protocol runner?
if let FeedChainError::ProtocolServiceError { error } = e {
handle_protocol_service_error(
error,
|e| warn!(log, "Failed to apply block"; "block" => block_to_apply.to_base58_check(), "reason" => format!("{:?}", e)),
)?;
}
// just break processing and wait for another event
break;
}
}
}
// allow others as soon as possible
if let Some(permit) = permit {
drop(permit);
}
// notify condvar
if let Some(oneshot_result) = oneshot_result {
// notify condvar
if let Err(e) = dispatch_oneshot_result(result_callback, || oneshot_result)
{
warn!(log, "Failed to dispatch result"; "reason" => format!("{}", e));
}
}
// notify after batch success done
if apply_block_run.load(Ordering::Acquire) {
// fire stats
if let Some(stats) = batch_stats.take() {
chain_feeder.tell(ApplyBlockDone { stats }, None);
}
if let Some(last_applied) = last_applied {
// notify bootstrapper just on the end of the success batch
if let Some(bootstrapper) = bootstrapper {
bootstrapper.tell(ApplyBlockBatchDone { last_applied }, None);
}
}
}
}
Event::ShuttingDown => {
apply_block_run.store(false, Ordering::Release);
}
}
}
}
Ok(())
}
/// Call protocol runner to apply a single block.
///
/// Return ProcessValidatedBlock - if block was applied or None if was already previously applied else Err
fn _apply_block(
    chain_id: Arc<ChainId>,
    block_hash: Arc<BlockHash>,
    apply_block_request_data: Result<
        (
            ApplyBlockRequest,
            block_meta_storage::Meta,
            Arc<BlockHeaderWithHash>,
        ),
        FeedChainError,
    >,
    validated_at_timer: Instant,
    load_metadata_elapsed: Duration,
    block_storage: &BlockStorage,
    block_meta_storage: &BlockMetaStorage,
    protocol_controller: &ProtocolController,
    storage_init_info: &StorageInitInfo,
    log: &Logger,
) -> Result<
    Option<(
        ProcessValidatedBlock,
        BlockAdditionalData,
        BlockValidationTimer,
    )>,
    FeedChainError,
> {
    // unwrap result (propagates any error from the data-preparation step)
    let (block_request, mut block_meta, block) = apply_block_request_data?;
    // check if not already applied; in replay mode the block is re-applied anyway
    if block_meta.is_applied() && storage_init_info.replay.is_none() {
        info!(log, "Block is already applied (feeder)"; "block" => block_hash.to_base58_check());
        return Ok(None);
    }
    // try apply block (protocol runner call is timed separately from metadata load/store)
    let protocol_call_timer = Instant::now();
    let apply_block_result = protocol_controller.apply_block(block_request)?;
    let protocol_call_elapsed = protocol_call_timer.elapsed();
    debug!(log, "Block was applied";
        "block_header_hash" => block_hash.to_base58_check(),
        "context_hash" => apply_block_result.context_hash.to_base58_check(),
        "validation_result_message" => &apply_block_result.validation_result_message);
    // warn about unusually slow protocol calls (threshold: BLOCK_APPLY_DURATION_LONG_TO_LOG)
    if protocol_call_elapsed.gt(&BLOCK_APPLY_DURATION_LONG_TO_LOG) {
        info!(log, "Block was validated with protocol with long processing";
            "block_header_hash" => block_hash.to_base58_check(),
            "context_hash" => apply_block_result.context_hash.to_base58_check(),
            "protocol_call_elapsed" => format!("{:?}", &protocol_call_elapsed));
    }
    // Lets mark header as applied and store result
    // store success result
    let store_result_timer = Instant::now();
    let block_additional_data = store_applied_block_result(
        block_storage,
        block_meta_storage,
        &block_hash,
        apply_block_result,
        &mut block_meta,
    )?;
    let store_result_elapsed = store_result_timer.elapsed();
    // return the validated block plus per-phase timings for stats aggregation
    Ok(Some((
        ProcessValidatedBlock::new(block, chain_id),
        block_additional_data,
        BlockValidationTimer::new(
            validated_at_timer.elapsed(),
            load_metadata_elapsed,
            protocol_call_elapsed,
            store_result_elapsed,
        ),
    )))
}
/// Collects all data required to apply a block: the block header, its
/// metadata, its operations, and the predecessor's header plus additional
/// data (metadata hashes, max operations TTL).
///
/// Returns `Err` if any required piece is missing from storage — there is
/// no `None` case (the previous doc comment was inaccurate on this point).
fn prepare_apply_request(
    block_hash: &BlockHash,
    chain_id: ChainId,
    block_storage: &BlockStorage,
    block_meta_storage: &BlockMetaStorage,
    operations_storage: &OperationsStorage,
    predecessor_data_cache: Option<(Arc<BlockHeaderWithHash>, BlockAdditionalData)>,
) -> Result<
    (
        ApplyBlockRequest,
        block_meta_storage::Meta,
        Arc<BlockHeaderWithHash>,
    ),
    FeedChainError,
> {
    // get block header (mandatory)
    let block = block_storage
        .get(block_hash)?
        .map(Arc::new)
        .ok_or_else(|| FeedChainError::StorageError {
            error: StorageError::MissingKey {
                when: "prepare_apply_request".into(),
            },
        })?;
    // get block metadata (mandatory); `block_hash` is already a reference,
    // so the former `&block_hash` double borrow was removed
    let block_meta =
        block_meta_storage
            .get(block_hash)?
            .ok_or_else(|| FeedChainError::ProcessingError {
                reason: "Block metadata not found".to_string(),
            })?;
    // get operations
    let operations = operations_storage.get_operations(block_hash)?;
    // resolve predecessor data, preferring the cache filled by the previous
    // step of the batch (avoids re-reading storage for sequential blocks)
    let (
        predecessor,
        (
            predecessor_block_metadata_hash,
            predecessor_ops_metadata_hash,
            predecessor_max_operations_ttl,
        ),
    ) = resolve_block_data(
        block.header.predecessor(),
        block_storage,
        block_meta_storage,
        predecessor_data_cache,
    )
    .map(|(block, additional_data)| (block, additional_data.into()))?;
    Ok((
        ApplyBlockRequest {
            chain_id,
            block_header: block.header.as_ref().clone(),
            pred_header: predecessor.header.as_ref().clone(),
            operations: ApplyBlockRequest::convert_operations(operations),
            max_operations_ttl: predecessor_max_operations_ttl as i32,
            predecessor_block_metadata_hash,
            predecessor_ops_metadata_hash,
        },
        block_meta,
        block,
    ))
}
/// Resolves a block's header and its additional data, consulting the
/// single-entry cache before falling back to the databases.
fn resolve_block_data(
    block_hash: &BlockHash,
    block_storage: &BlockStorage,
    block_meta_storage: &BlockMetaStorage,
    block_data_cache: Option<(Arc<BlockHeaderWithHash>, BlockAdditionalData)>,
) -> Result<(Arc<BlockHeaderWithHash>, BlockAdditionalData), FeedChainError> {
    // Fast path: the cache holds exactly the requested block.
    if let Some((cached_header, cached_data)) = block_data_cache {
        if cached_header.hash.eq(block_hash) {
            return Ok((cached_header, cached_data));
        }
    }
    // Cache miss: read the header from block storage.
    let header = block_storage
        .get(block_hash)?
        .map(Arc::new)
        .ok_or_else(|| FeedChainError::StorageError {
            error: StorageError::MissingKey {
                when: "resolve_block_data (block_storage)".into(),
            },
        })?;
    // ... and the block's additional data from the metadata storage.
    let additional_data = block_meta_storage
        .get_additional_data(block_hash)?
        .ok_or_else(|| FeedChainError::StorageError {
            error: StorageError::MissingKey {
                when: "resolve_block_data (block_meta_storage)".into(),
            },
        })?;
    Ok((header, additional_data))
}
/// This initializes the ocaml runtime and protocol context.
///
/// If we start with new databases without genesis, it ensures correct
/// initialization of storage with the genesis block and its data.
pub(crate) fn initialize_protocol_context(
    apply_block_run: &AtomicBool,
    chain_current_head_manager: &ChainCurrentHeadManagerRef,
    block_storage: &BlockStorage,
    block_meta_storage: &BlockMetaStorage,
    chain_meta_storage: &ChainMetaStorage,
    operations_meta_storage: &OperationsMetaStorage,
    protocol_controller: &ProtocolController,
    log: &Logger,
    tezos_env: &TezosEnvironmentConfiguration,
    init_storage_data: &StorageInitInfo,
) -> Result<(), FeedChainError> {
    let validated_at_timer = Instant::now();
    // we must check if genesis is applied, if not then we need "commit_genesis" to context
    let load_metadata_timer = Instant::now();
    let need_commit_genesis = init_storage_data.replay.is_some()
        || match block_meta_storage.get(&init_storage_data.genesis_block_header_hash)? {
            Some(genesis_meta) => !genesis_meta.is_applied(),
            None => true,
        };
    let load_metadata_elapsed = load_metadata_timer.elapsed();
    trace!(log, "Looking for genesis if applied"; "need_commit_genesis" => need_commit_genesis);
    // initialize protocol context runtime
    let protocol_call_timer = Instant::now();
    let context_init_info = protocol_controller.init_protocol_for_write(
        need_commit_genesis,
        &init_storage_data.patch_context,
        init_storage_data.context_stats_db_path.clone(),
    )?;
    // TODO - TE-261: what happens if this fails?
    // Initialize the context IPC server to serve reads from readonly protocol runners
    protocol_controller.init_context_ipc_server()?;
    let protocol_call_elapsed = protocol_call_timer.elapsed();
    info!(log, "Protocol context initialized"; "context_init_info" => format!("{:?}", &context_init_info), "need_commit_genesis" => need_commit_genesis);
    if need_commit_genesis {
        // if we needed commit_genesis, it means, that it is apply of 0 block,
        // which initiates genesis protocol in context, so we need to store some data, like we do in normal apply, see below store_apply_block_result
        if let Some(genesis_context_hash) = context_init_info.genesis_commit_hash {
            // at first store genesis to storage
            // (parameters below are already references, so the former `&init_storage_data`,
            // `&tezos_env` and `&log` double borrows were removed)
            let store_result_timer = Instant::now();
            let genesis_with_hash = initialize_storage_with_genesis_block(
                block_storage,
                block_meta_storage,
                init_storage_data,
                tezos_env,
                &genesis_context_hash,
                log,
            )?;
            // call get additional/json data for genesis (this must be second call, because this triggers context.checkout)
            // this needs to be second step, because, this triggers context.checkout, so we need to call it after store_commit_genesis_result
            let commit_data = protocol_controller.genesis_result_data(&genesis_context_hash)?;
            // this, marks genesis block as applied
            let _ = store_commit_genesis_result(
                block_storage,
                block_meta_storage,
                chain_meta_storage,
                operations_meta_storage,
                init_storage_data,
                commit_data,
            )?;
            let store_result_elapsed = store_result_timer.elapsed();
            let mut stats = ApplyBlockStats::default();
            stats.set_applied_block_level(genesis_with_hash.header.level());
            stats.add_block_validation_stats(&BlockValidationTimer::new(
                validated_at_timer.elapsed(),
                load_metadata_elapsed,
                protocol_call_elapsed,
                store_result_elapsed,
            ));
            info!(log, "Genesis commit stored successfully";
                       "stats" => stats.print_formatted_average_times());
            // notify listeners
            if apply_block_run.load(Ordering::Acquire) {
                // notify others that the block successfully applied
                chain_current_head_manager.tell(
                    ProcessValidatedBlock::new(
                        Arc::new(genesis_with_hash),
                        Arc::new(init_storage_data.chain_id.clone()),
                    ),
                    None,
                );
            }
        }
    }
    Ok(())
}
const BLOCK_APPLY_DURATION_LONG_TO_LOG: Duration = Duration::from_secs(30);
| 38.808134 | 157 | 0.561355 |
2f1ea24165b012df741c9103c7b14b715c0ec7e3 | 9,976 | // Copyright (C) 2018 François Laignel <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::convert::{TryFrom, TryInto};
use glib::translate::{FromGlib, ToGlib};
use glib::value::{SetValue, SetValueOptional};
use glib::StaticType;
use serde::de::{Deserialize, Deserializer, Error};
use serde::ser;
use serde::ser::{Serialize, Serializer};
use DateTime;
// The variant names and field order define the serialized wire format
// (e.g. `YMDhmsTz(2018, 5, 28, 16, 6, 42.123456, 2)` in RON); renaming a
// variant is a breaking change for previously serialized data.
#[derive(Serialize, Deserialize)]
enum DateTimeVariants {
    Y(i32),
    YM(i32, i32),
    YMD(i32, i32, i32),
    YMDhmTz(i32, i32, i32, i32, i32, f32),
    YMDhmsTz(i32, i32, i32, i32, i32, f64, f32),
}
// Note: ser / de for `glib::Date` should be implemented in the `glib` crate
// However, there is no `ser_de` feature in `glib` right now. The limitation is that
// `Date` fields can only be ser / de when they are used in `Value`s (which implies
// `Array`s, `List`s, `Structure` fields and `Tag`s)
// Newtype wrapper around `glib::Date` so ser/de traits can be implemented
// for it in this crate (see the note above about the missing `ser_de`
// feature in `glib`).
pub(crate) struct Date(glib::Date);
impl From<glib::Date> for Date {
    fn from(glib_date: glib::Date) -> Self {
        Date(glib_date)
    }
}
// The three impls below simply delegate to the wrapped `glib::Date`.
impl SetValue for Date {
    unsafe fn set_value(value: &mut glib::Value, this: &Self) {
        glib::value::SetValue::set_value(value, &this.0);
    }
}
impl SetValueOptional for Date {
    unsafe fn set_value_optional(value: &mut glib::Value, this: Option<&Self>) {
        glib::value::SetValueOptional::set_value_optional(value, this.map(|this| &this.0));
    }
}
impl StaticType for Date {
    fn static_type() -> glib::Type {
        glib::Date::static_type()
    }
}
// The impl previously declared an unused lifetime parameter `<'a>`;
// removed (clippy: extra_unused_lifetimes).
impl Serialize for Date {
    /// A `Date` always serializes as the `YMD` variant of `DateTimeVariants`.
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        DateTimeVariants::YMD(
            self.0.get_year() as i32,
            self.0.get_month().to_glib() as i32,
            self.0.get_day() as i32,
        )
        .serialize(serializer)
    }
}
// The impl previously declared an unused lifetime parameter `<'a>`;
// removed (clippy: extra_unused_lifetimes).
impl Serialize for DateTime {
    /// Serializes into the most precise `DateTimeVariants` variant that the
    /// present fields support, checked from most to least specific.
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let variant = if self.has_second() {
            DateTimeVariants::YMDhmsTz(
                self.get_year(),
                self.get_month(),
                self.get_day(),
                self.get_hour(),
                self.get_minute(),
                // seconds and microseconds are folded into one fractional f64
                f64::from(self.get_second()) + f64::from(self.get_microsecond()) / 1_000_000f64,
                self.get_time_zone_offset(),
            )
        } else if self.has_time() {
            DateTimeVariants::YMDhmTz(
                self.get_year(),
                self.get_month(),
                self.get_day(),
                self.get_hour(),
                self.get_minute(),
                self.get_time_zone_offset(),
            )
        } else if self.has_day() {
            DateTimeVariants::YMD(self.get_year(), self.get_month(), self.get_day())
        } else if self.has_month() {
            DateTimeVariants::YM(self.get_year(), self.get_month())
        } else if self.has_year() {
            DateTimeVariants::Y(self.get_year())
        } else {
            // a DateTime with no parts at all is not representable
            return Err(ser::Error::custom(format!(
                "no parts could be found in `DateTime` {}",
                self,
            )));
        };
        variant.serialize(serializer)
    }
}
impl TryFrom<DateTimeVariants> for Date {
    type Error = &'static str;

    /// Only the `YMD` variant can become a `Date`; every component is
    /// range-checked (month first, then day, then year — same order as the
    /// original implementation).
    fn try_from(dt_variant: DateTimeVariants) -> Result<Self, Self::Error> {
        let (year, month_raw, day) = match dt_variant {
            DateTimeVariants::YMD(y, m, d) => (y, m, d),
            _ => return Err("Incompatible variant for `Date` (expecting \"YMD\")"),
        };
        let month = glib::DateMonth::from_glib(month_raw);
        if let glib::DateMonth::__Unknown(_) = month {
            return Err("Out of range `month` for `Date`");
        }
        let day = day.try_into().map_err(|_| "Out of range `day` for `Date`")?;
        let year = year
            .try_into()
            .map_err(|_| "Out of range `year` for `Date`")?;
        Ok(Date(glib::Date::new_dmy(day, month, year)))
    }
}
// Deserialize via the intermediate `DateTimeVariants`, then narrow to `Date`
// (fails for any variant other than `YMD`).
impl<'de> Deserialize<'de> for Date {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        DateTimeVariants::deserialize(deserializer)
            .and_then(|dt_variant| dt_variant.try_into().map_err(D::Error::custom))
    }
}
#[allow(clippy::many_single_char_names)]
impl TryFrom<DateTimeVariants> for DateTime {
    type Error = glib::BoolError;
    fn try_from(dt_variant: DateTimeVariants) -> Result<Self, Self::Error> {
        match dt_variant {
            DateTimeVariants::Y(y) => DateTime::new_y(y),
            DateTimeVariants::YM(y, m) => DateTime::new_ym(y, m),
            DateTimeVariants::YMD(y, m, d) => DateTime::new_ymd(y, m, d),
            DateTimeVariants::YMDhmTz(y, m, d, h, mn, tz) => {
                // -1 seconds marks "no seconds part" (mirrors serialization)
                DateTime::new(tz, y, m, d, h, mn, -1f64)
            }
            DateTimeVariants::YMDhmsTz(y, m, d, h, mn, s, tz) => {
                DateTime::new(tz, y, m, d, h, mn, s)
            }
        }
    }
}
// Deserialize via the intermediate `DateTimeVariants`; any variant maps to a
// valid `DateTime` unless construction itself fails.
impl<'de> Deserialize<'de> for DateTime {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        DateTimeVariants::deserialize(deserializer)
            .and_then(|dt_variant| dt_variant.try_into().map_err(D::Error::custom))
    }
}
// Tests covering serialization, deserialization, and round-trips of every
// `DateTimeVariants` shape, in both RON and JSON.
#[cfg(test)]
mod tests {
    extern crate ron;
    extern crate serde_json;
    use DateTime;
    #[test]
    fn test_serialize() {
        ::init().unwrap();
        let mut pretty_config = ron::ser::PrettyConfig::default();
        pretty_config.new_line = "".to_string();
        // full precision: seconds + microseconds + timezone
        let datetime = DateTime::new(2f32, 2018, 5, 28, 16, 6, 42.123_456f64).unwrap();
        let res = ron::ser::to_string_pretty(&datetime, pretty_config.clone());
        assert_eq!(
            Ok("YMDhmsTz(2018, 5, 28, 16, 6, 42.123456, 2)".to_owned()),
            res,
        );
        let res = serde_json::to_string(&datetime).unwrap();
        assert_eq!(
            r#"{"YMDhmsTz":[2018,5,28,16,6,42.123456,2.0]}"#.to_owned(),
            res
        );
        // -1 seconds means "no seconds part" -> YMDhmTz
        let datetime = DateTime::new(2f32, 2018, 5, 28, 16, 6, -1f64).unwrap();
        let res = ron::ser::to_string_pretty(&datetime, pretty_config.clone());
        assert_eq!(Ok("YMDhmTz(2018, 5, 28, 16, 6, 2)".to_owned()), res,);
        let datetime = DateTime::new_ymd(2018, 5, 28).unwrap();
        let res = ron::ser::to_string_pretty(&datetime, pretty_config.clone());
        assert_eq!(Ok("YMD(2018, 5, 28)".to_owned()), res);
        let datetime = DateTime::new_ym(2018, 5).unwrap();
        let res = ron::ser::to_string_pretty(&datetime, pretty_config.clone());
        assert_eq!(Ok("YM(2018, 5)".to_owned()), res);
        let datetime = DateTime::new_y(2018).unwrap();
        let res = ron::ser::to_string_pretty(&datetime, pretty_config);
        assert_eq!(Ok("Y(2018)".to_owned()), res);
    }
    #[test]
    fn test_deserialize() {
        ::init().unwrap();
        let datetime_ron = "YMDhmsTz(2018, 5, 28, 16, 6, 42.123456, 2)";
        let datetime_de: DateTime = ron::de::from_str(datetime_ron).unwrap();
        assert_eq!(
            datetime_de,
            DateTime::new(2f32, 2018, 5, 28, 16, 6, 42.123_456f64).unwrap()
        );
        let datetime_json = r#"{"YMDhmsTz":[2018,5,28,16,6,42.123456,2.0]}"#;
        let datetime_de: DateTime = serde_json::from_str(datetime_json).unwrap();
        assert_eq!(
            datetime_de,
            DateTime::new(2f32, 2018, 5, 28, 16, 6, 42.123_456f64).unwrap()
        );
        let datetime_ron = "YMDhmTz(2018, 5, 28, 16, 6, 2)";
        let datetime_de: DateTime = ron::de::from_str(datetime_ron).unwrap();
        assert_eq!(
            datetime_de,
            DateTime::new(2f32, 2018, 5, 28, 16, 6, -1f64).unwrap()
        );
        let datetime_ron = "YMD(2018, 5, 28)";
        let datetime_de: DateTime = ron::de::from_str(datetime_ron).unwrap();
        assert_eq!(datetime_de, DateTime::new_ymd(2018, 5, 28).unwrap());
        let datetime_ron = "YM(2018, 5)";
        let datetime_de: DateTime = ron::de::from_str(datetime_ron).unwrap();
        assert_eq!(datetime_de, DateTime::new_ym(2018, 5).unwrap());
        let datetime_ron = "Y(2018)";
        let datetime_de: DateTime = ron::de::from_str(datetime_ron).unwrap();
        assert_eq!(datetime_de, DateTime::new_y(2018).unwrap());
    }
    #[test]
    fn test_serde_roundtrip() {
        ::init().unwrap();
        // every variant shape must survive ser -> de unchanged
        let datetime = DateTime::new(2f32, 2018, 5, 28, 16, 6, 42.123_456f64).unwrap();
        let datetime_ser = ron::ser::to_string(&datetime).unwrap();
        let datetime_de: DateTime = ron::de::from_str(datetime_ser.as_str()).unwrap();
        assert_eq!(datetime_de, datetime);
        let datetime = DateTime::new(2f32, 2018, 5, 28, 16, 6, -1f64).unwrap();
        let datetime_ser = ron::ser::to_string(&datetime).unwrap();
        let datetime_de: DateTime = ron::de::from_str(datetime_ser.as_str()).unwrap();
        assert_eq!(datetime_de, datetime);
        let datetime = DateTime::new_ymd(2018, 5, 28).unwrap();
        let datetime_ser = ron::ser::to_string(&datetime).unwrap();
        let datetime_de: DateTime = ron::de::from_str(datetime_ser.as_str()).unwrap();
        assert_eq!(datetime_de, datetime);
        let datetime = DateTime::new_ym(2018, 5).unwrap();
        let datetime_ser = ron::ser::to_string(&datetime).unwrap();
        let datetime_de: DateTime = ron::de::from_str(datetime_ser.as_str()).unwrap();
        assert_eq!(datetime_de, datetime);
        let datetime = DateTime::new_y(2018).unwrap();
        let datetime_ser = ron::ser::to_string(&datetime).unwrap();
        let datetime_de: DateTime = ron::de::from_str(datetime_ser.as_str()).unwrap();
        assert_eq!(datetime_de, datetime);
    }
}
| 36.276364 | 96 | 0.5832 |
26f2c34e7d247401cbc39cbd8a9cc66378206648 | 5,772 | mod instruction;
use std::collections::HashMap;
use crate::ssa;
use super::asm;
/// Lowers an SSA module to target assembly by running instruction selection
/// over all of its globals and functions.
pub fn translate(module: ssa::Module) -> asm::Assembly {
    InstructionSelector::new().translate(module)
}
/// Walks an SSA module and emits the corresponding assembly.
struct InstructionSelector {
    // Output assembly accumulated during translation.
    assembly: asm::Assembly,
    // Maps each stack-allocating instruction to its rbp-relative operand
    // (filled by `calc_stack_offset`).
    stack_offsets: HashMap<ssa::InstructionId, asm::Operand>,
    // Name of the function currently being translated; used to build labels.
    cur_func_name: String,
    // presumably caches operands for GEP-style address computations — TODO confirm
    geps: HashMap<ssa::InstructionId, asm::Operand>,
}
impl InstructionSelector {
fn new() -> Self {
Self {
assembly: asm::Assembly::new(),
stack_offsets: HashMap::new(),
cur_func_name: "".into(),
geps: HashMap::new(),
}
}
    /// Translates every global and every non-empty function, consuming the
    /// selector and returning the finished assembly.
    fn translate(mut self, module: ssa::Module) -> asm::Assembly {
        for (_, global) in &module.globals {
            self.trans_global(global);
        }
        for (_, function) in &module.functions {
            // declarations without a body have nothing to emit
            if function.block_order.is_empty() {
                continue;
            }
            self.trans_function(&module, function);
        }
        self.assembly
    }
    // TODO: data size is hard-coded to 8 bytes; it should be derived from the
    // global's actual type.
    fn trans_global(&mut self, global: &ssa::Global) {
        self.assembly.data.add_data(global.name.clone(), 8);
    }
    /// Emits one function: prologue, callee-saved register spills, all basic
    /// blocks in order, then a shared epilogue under a return label.
    fn trans_function(&mut self, module: &ssa::Module, ssa_func: &ssa::Function) {
        let mut asm_func = asm::Function::new(&ssa_func.name);
        self.cur_func_name = ssa_func.name.clone();
        // prologue: save rbp, establish the new frame pointer
        asm_func.add_inst(asm::Instruction::new(
            asm::Mnemonic::Push,
            vec![asm::Operand::Register(asm::MachineRegisterKind::Rbp.into())],
        ));
        asm_func.add_inst(asm::Instruction::new(
            asm::Mnemonic::Mov,
            vec![
                asm::Operand::Register(asm::MachineRegisterKind::Rbp.into()),
                asm::Operand::Register(asm::MachineRegisterKind::Rsp.into()),
            ],
        ));
        // reserve stack space for all Alloc instructions of this function
        let stack_offset = self.calc_stack_offset(ssa_func);
        asm_func.add_inst(asm::Instruction::new(
            asm::Mnemonic::Sub,
            vec![
                asm::Operand::Register(asm::MachineRegisterKind::Rsp.into()),
                asm::Operand::Immediate(asm::Immediate::I32(stack_offset)),
            ],
        ));
        // save registers on entry (restored in reverse order below)
        for reg in &asm::REGS {
            asm_func.add_inst(asm::Instruction::new(
                asm::Mnemonic::Push,
                vec![asm::Operand::Register(reg.clone().into())],
            ));
        }
        for block_id in &ssa_func.block_order {
            let block = ssa_func.block(*block_id).unwrap();
            asm_func.add_label(self.block_label(*block_id));
            self.trans_block(module, &ssa_func, block, &mut asm_func);
        }
        // epilogue: single exit point shared by all returns
        asm_func.add_label(self.return_label());
        for reg in asm::REGS.iter().rev() {
            asm_func.add_inst(asm::Instruction::new(
                asm::Mnemonic::Pop,
                vec![asm::Operand::Register(reg.clone().into())],
            ));
        }
        asm_func.add_inst(asm::Instruction::new(
            asm::Mnemonic::Mov,
            vec![
                asm::Operand::Register(asm::MachineRegisterKind::Rsp.into()),
                asm::Operand::Register(asm::MachineRegisterKind::Rbp.into()),
            ],
        ));
        asm_func.add_inst(asm::Instruction::new(
            asm::Mnemonic::Pop,
            vec![asm::Operand::Register(asm::MachineRegisterKind::Rbp.into())],
        ));
        asm_func.add_inst(asm::Instruction::new(asm::Mnemonic::Ret, vec![]));
        self.assembly.text.add_function(asm_func);
    }
    /// Assigns an rbp-relative slot to every `Alloc` instruction and returns
    /// the total frame size needed for them.
    fn calc_stack_offset(&mut self, function: &ssa::Function) -> i32 {
        // TODO
        let mut stack_offset = 0;
        self.stack_offsets.clear();
        for block_id in &function.block_order {
            let block = function.block(*block_id).unwrap();
            for inst_id in &block.instructions {
                let inst = function.inst(*inst_id).unwrap();
                if let ssa::InstructionKind::Alloc(typ) = inst.kind {
                    // each slot is aligned to the type's register size
                    let align = typ.reg_size().size() as i32;
                    let typ_size = typ.size(&function.types.borrow()) as i32;
                    stack_offset = Self::align_to(stack_offset, align) + typ_size;
                    self.stack_offsets.insert(
                        *inst_id,
                        asm::Operand::Indirect(asm::Indirect::new_imm(
                            asm::MachineRegisterKind::Rbp.into(),
                            -stack_offset,
                            typ.reg_size(),
                        )),
                    );
                }
            }
        }
        stack_offset
    }
fn align_to(x: i32, align: i32) -> i32 {
(x + align - 1) & !(align - 1)
}
    /// Translates the instructions of one basic block, then its terminator
    /// (if any) into the current assembly function.
    fn trans_block(
        &mut self,
        module: &ssa::Module,
        ssa_func: &ssa::Function,
        block: &ssa::Block,
        asm_func: &mut asm::Function,
    ) {
        for inst_id in &block.instructions {
            let ssa_inst = ssa_func.inst(*inst_id).unwrap();
            let asm_inst = self.trans_inst(module, inst_id, &ssa_inst.kind);
            for inst in asm_inst {
                asm_func.add_inst(inst);
            }
        }
        // a block may legitimately have no terminator yet
        let term_id = match block.terminator {
            Some(term_id) => term_id,
            None => return,
        };
        let ssa_inst = ssa_func.inst(term_id).unwrap();
        let asm_inst = self.trans_term(term_id, &ssa_inst.kind);
        for inst in asm_inst {
            asm_func.add_inst(inst);
        }
    }
    // Label for a basic block: `.<function>.<block index>`.
    fn block_label(&self, block_id: ssa::BlockId) -> String {
        format!(".{}.{}", self.cur_func_name, block_id.index())
    }
    // Label of the shared function epilogue: `.<function>.ret`.
    fn return_label(&self) -> String {
        format!(".{}.ret", self.cur_func_name)
    }
}
| 31.369565 | 82 | 0.532051 |
d6f52094a20e58f064a0ccee512529733434ff2d | 5,241 | use super::*;
use proptest::strategy::Strategy;
// Property tests for `native` (float equality): a float compares equal to
// itself, to an equal-valued float, and to integers with the same numeric
// value; unequal values and non-numbers compare unequal.
#[test]
fn without_small_integer_or_big_integer_or_float_returns_false() {
    with_process_arc(|arc_process| {
        TestRunner::new(Config::with_source_file(file!()))
            .run(
                &(
                    strategy::term::float(arc_process.clone()),
                    strategy::term(arc_process.clone())
                        .prop_filter("Right must not be a number", |v| !v.is_number()),
                ),
                |(left, right)| {
                    prop_assert_eq!(native(left, right), false.into());
                    Ok(())
                },
            )
            .unwrap();
    });
}
// Reflexivity: the very same term is equal to itself.
#[test]
fn with_same_float_returns_true() {
    with_process_arc(|arc_process| {
        TestRunner::new(Config::with_source_file(file!()))
            .run(&strategy::term::float(arc_process.clone()), |operand| {
                prop_assert_eq!(native(operand, operand), true.into());
                Ok(())
            })
            .unwrap();
    });
}
// Two distinct heap floats with the same value are still equal.
#[test]
fn with_same_value_float_right_returns_true() {
    with_process_arc(|arc_process| {
        TestRunner::new(Config::with_source_file(file!()))
            .run(
                &any::<f64>().prop_map(|f| {
                    let mut heap = arc_process.acquire_heap();
                    (heap.float(f).unwrap(), heap.float(f).unwrap())
                }),
                |(left, right)| {
                    prop_assert_eq!(native(left.into(), right.into()), true.into());
                    Ok(())
                },
            )
            .unwrap();
    });
}
#[test]
fn with_different_float_right_returns_false() {
    with_process_arc(|arc_process| {
        TestRunner::new(Config::with_source_file(file!()))
            .run(
                &(
                    strategy::term::float(arc_process.clone()),
                    strategy::term::float(arc_process.clone()),
                )
                    .prop_filter("Right and left must be different", |(left, right)| {
                        left != right
                    }),
                |(left, right)| {
                    prop_assert_eq!(native(left, right), false.into());
                    Ok(())
                },
            )
            .unwrap();
    });
}
// Floats holding an integral value compare equal to the same small integer.
#[test]
fn with_same_value_small_integer_right_returns_true() {
    with_process_arc(|arc_process| {
        TestRunner::new(Config::with_source_file(file!()))
            .run(
                &strategy::term::small_integer_float_integral_i64().prop_map(|i| {
                    let mut heap = arc_process.acquire_heap();
                    (heap.float(i as f64).unwrap(), heap.integer(i).unwrap())
                }),
                |(left, right)| {
                    prop_assert_eq!(native(left.into(), right), true.into());
                    Ok(())
                },
            )
            .unwrap();
    });
}
#[test]
fn with_different_value_small_integer_right_returns_false() {
    with_process_arc(|arc_process| {
        TestRunner::new(Config::with_source_file(file!()))
            .run(
                &strategy::term::small_integer_float_integral_i64().prop_map(|i| {
                    let mut heap = arc_process.acquire_heap();
                    // `i + 1` guarantees an unequal integer
                    (heap.float(i as f64).unwrap(), heap.integer(i + 1).unwrap())
                }),
                |(left, right)| {
                    prop_assert_eq!(native(left.into(), right), false.into());
                    Ok(())
                },
            )
            .unwrap();
    });
}
// The big-integer strategy is only available on some targets, hence the
// `Option` match around the test body.
#[test]
fn with_same_value_big_integer_right_returns_true() {
    match strategy::term::big_integer_float_integral_i64() {
        Some(strategy) => {
            with_process_arc(|arc_process| {
                TestRunner::new(Config::with_source_file(file!()))
                    .run(
                        &strategy.prop_map(|i| {
                            let mut heap = arc_process.acquire_heap();
                            (heap.float(i as f64).unwrap(), heap.integer(i).unwrap())
                        }),
                        |(left, right)| {
                            prop_assert_eq!(native(left.into(), right), true.into());
                            Ok(())
                        },
                    )
                    .unwrap();
            });
        }
        None => (),
    };
}
#[test]
fn with_different_value_big_integer_right_returns_false() {
    match strategy::term::big_integer_float_integral_i64() {
        Some(strategy) => {
            with_process_arc(|arc_process| {
                TestRunner::new(Config::with_source_file(file!()))
                    .run(
                        &strategy.prop_map(|i| {
                            let mut heap = arc_process.acquire_heap();
                            (heap.float(i as f64).unwrap(), heap.integer(i + 1).unwrap())
                        }),
                        |(left, right)| {
                            prop_assert_eq!(native(left.into(), right), false.into());
                            Ok(())
                        },
                    )
                    .unwrap();
            });
        }
        None => (),
    };
}
| 31.011834 | 89 | 0.450296 |
e6bb4e5db18d3a5c0cf9c082aaa430ac53ddb80e | 11,181 | //! The exception module contains all the exception kinds and the function to handle exceptions.
use crate::{
cpu::{Cpu, Mode},
csr::*,
};
/// All the exception kinds.
#[derive(Debug, PartialEq)]
pub enum Exception {
    /// With the addition of the C extension, no instructions can raise
    /// instruction-address-misaligned exceptions.
    InstructionAddressMisaligned,
    InstructionAccessFault,
    /// Carries the faulting instruction bits (written to mtval/stval).
    IllegalInstruction(u64),
    Breakpoint,
    LoadAddressMisaligned,
    LoadAccessFault,
    StoreAMOAddressMisaligned,
    StoreAMOAccessFault,
    EnvironmentCallFromUMode,
    EnvironmentCallFromSMode,
    EnvironmentCallFromMMode,
    // Stores a trap value (the faulting address) for page fault exceptions.
    InstructionPageFault(u64),
    LoadPageFault(u64),
    StoreAMOPageFault(u64),
}
/// All the trap kinds.
#[derive(Debug)]
pub enum Trap {
    /// The trap is visible to, and handled by, software running inside the execution
    /// environment.
    Contained,
    /// The trap is a synchronous exception that is an explicit call to the execution
    /// environment requesting an action on behalf of software inside the execution environment.
    Requested,
    /// The trap is handled transparently by the execution environment and execution
    /// resumes normally after the trap is handled.
    Invisible,
    /// The trap represents a fatal failure and causes the execution environment to terminate
    /// execution.
    Fatal,
}
impl Exception {
    /// Numeric cause code for this exception, as written to mcause/scause
    /// when the trap is taken.
    fn exception_code(&self) -> u64 {
        match self {
            Exception::InstructionAddressMisaligned => 0,
            Exception::InstructionAccessFault => 1,
            Exception::IllegalInstruction(_) => 2,
            Exception::Breakpoint => 3,
            Exception::LoadAddressMisaligned => 4,
            Exception::LoadAccessFault => 5,
            Exception::StoreAMOAddressMisaligned => 6,
            Exception::StoreAMOAccessFault => 7,
            Exception::EnvironmentCallFromUMode => 8,
            Exception::EnvironmentCallFromSMode => 9,
            Exception::EnvironmentCallFromMMode => 11,
            Exception::InstructionPageFault(_) => 12,
            Exception::LoadPageFault(_) => 13,
            Exception::StoreAMOPageFault(_) => 15,
        }
    }
    /// The value to record in the exception program counter (mepc/sepc):
    /// the faulting instruction itself for ECALL/EBREAK/page faults,
    /// otherwise the following instruction.
    fn epc(&self, pc: u64) -> u64 {
        // 3.2.1 Environment Call and Breakpoint
        // "ECALL and EBREAK cause the receiving privilege mode’s epc register to be set to the
        // address of the ECALL or EBREAK instruction itself, not the address of the following
        // instruction."
        match self {
            Exception::Breakpoint
            | Exception::EnvironmentCallFromUMode
            | Exception::EnvironmentCallFromSMode
            | Exception::EnvironmentCallFromMMode
            // TODO: why page fault needs this?
            | Exception::InstructionPageFault(_)
            | Exception::LoadPageFault(_)
            | Exception::StoreAMOPageFault(_) => pc,
            _ => pc.wrapping_add(4),
        }
    }
    /// The exception-specific value for mtval/stval: the faulting address
    /// where one exists, the instruction bits for illegal instruction,
    /// and zero otherwise.
    fn trap_value(&self, pc: u64) -> u64 {
        // 3.1.17 Machine Trap Value Register (mtval)
        // 4.1.9 Supervisor Trap Value Register (stval)
        // "When a hardware breakpoint is triggered, or an address-misaligned, access-fault, or
        // page-fault exception occurs on an instruction fetch, load, or store, mtval (stval) is
        // written with the faulting virtual address. On an illegal instruction trap, mtval (stval)
        // may be written with the first XLEN or ILEN bits of the faulting instruction as described
        // below. For other traps, mtval (stval) is set to zero, but a future standard may redefine
        // mtval's (stval's) setting for other traps."
        match self {
            Exception::InstructionAddressMisaligned
            | Exception::InstructionAccessFault
            | Exception::Breakpoint
            | Exception::LoadAddressMisaligned
            | Exception::LoadAccessFault
            | Exception::StoreAMOAddressMisaligned
            | Exception::StoreAMOAccessFault => pc,
            Exception::InstructionPageFault(val)
            | Exception::LoadPageFault(val)
            | Exception::StoreAMOPageFault(val) => *val,
            Exception::IllegalInstruction(val) => *val,
            _ => 0,
        }
    }
    /// Update CSRs and the program counter depending on an exception.
    ///
    /// Based on the `medeleg` CSR the trap is taken either into S-mode or
    /// M-mode: the faulting PC, the cause code, and the trap value are written
    /// to that mode's epc/cause/tval CSRs, the interrupt-enable state is
    /// stacked (xPIE <- xIE, xIE <- 0, xPP <- previous mode), and `cpu.pc` is
    /// redirected to the mode's trap-vector base. Returns the severity of the
    /// trap for the caller to act on.
    pub fn take_trap(&self, cpu: &mut Cpu) -> Trap {
        // 1.2 Privilege Levels
        // "Traps that increase privilege level are termed vertical traps, while traps that remain
        // at the same privilege level are termed horizontal traps."
        let exception_pc = self.epc(cpu.pc);
        let previous_mode = cpu.mode;
        let cause = self.exception_code();
        // 3.1.8 Machine Trap Delegation Registers (medeleg and mideleg)
        // "By default, all traps at any privilege level are handled in machine mode"
        // "To increase performance, implementations can provide individual read/write bits within
        // medeleg and mideleg to indicate that certain exceptions and interrupts should be
        // processed directly by a lower privilege level."
        //
        // "medeleg has a bit position allocated for every synchronous exception shown in Table 3.6
        // on page 37, with the index of the bit position equal to the value returned in the mcause
        // register (i.e., setting bit 8 allows user-mode environment calls to be delegated to a
        // lower-privilege trap handler)."
        if previous_mode <= Mode::Supervisor && ((cpu.state.read(MEDELEG) >> cause) & 1) == 1 {
            // Handle the trap in S-mode.
            cpu.mode = Mode::Supervisor;
            // Set the program counter to the supervisor trap-handler base address (stvec).
            cpu.pc = (cpu.state.read(STVEC) & !1) as u64;
            // 4.1.9 Supervisor Exception Program Counter (sepc)
            // "The low bit of sepc (sepc[0]) is always zero."
            // "When a trap is taken into S-mode, sepc is written with the virtual address of
            // the instruction that was interrupted or that encountered the exception.
            // Otherwise, sepc is never written by the implementation, though it may be
            // explicitly written by software."
            cpu.state.write(SEPC, exception_pc & !1);
            // 4.1.10 Supervisor Cause Register (scause)
            // "When a trap is taken into S-mode, scause is written with a code indicating
            // the event that caused the trap. Otherwise, scause is never written by the
            // implementation, though it may be explicitly written by software."
            cpu.state.write(SCAUSE, cause);
            // 4.1.11 Supervisor Trap Value (stval) Register
            // "When a trap is taken into S-mode, stval is written with exception-specific
            // information to assist software in handling the trap. Otherwise, stval is never
            // written by the implementation, though it may be explicitly written by software."
            cpu.state.write(STVAL, self.trap_value(exception_pc));
            // Set a previous interrupt-enable bit for supervisor mode (SPIE, 5) to the value
            // of a global interrupt-enable bit for supervisor mode (SIE, 1).
            cpu.state
                .write_bit(SSTATUS, 5, cpu.state.read_bit(SSTATUS, 1));
            // Set a global interrupt-enable bit for supervisor mode (SIE, 1) to 0.
            cpu.state.write_bit(SSTATUS, 1, 0);
            // 4.1.1 Supervisor Status Register (sstatus)
            // "When a trap is taken, SPP is set to 0 if the trap originated from user mode, or
            // 1 otherwise."
            match previous_mode {
                Mode::User => cpu.state.write_bit(SSTATUS, 8, 0),
                _ => cpu.state.write_bit(SSTATUS, 8, 1),
            }
        } else {
            // Handle the trap in M-mode.
            cpu.mode = Mode::Machine;
            // Set the program counter to the machine trap-handler base address (mtvec).
            cpu.pc = (cpu.state.read(MTVEC) & !1) as u64;
            // 3.1.15 Machine Exception Program Counter (mepc)
            // "The low bit of mepc (mepc[0]) is always zero."
            // "When a trap is taken into M-mode, mepc is written with the virtual address of
            // the instruction that was interrupted or that encountered the exception.
            // Otherwise, mepc is never written by the implementation, though it may be
            // explicitly written by software."
            cpu.state.write(MEPC, exception_pc & !1);
            // 3.1.16 Machine Cause Register (mcause)
            // "When a trap is taken into M-mode, mcause is written with a code indicating
            // the event that caused the trap. Otherwise, mcause is never written by the
            // implementation, though it may be explicitly written by software."
            cpu.state.write(MCAUSE, cause);
            // 3.1.17 Machine Trap Value (mtval) Register
            // "When a trap is taken into M-mode, mtval is either set to zero or written with
            // exception-specific information to assist software in handling the trap.
            // Otherwise, mtval is never written by the implementation, though it may be
            // explicitly written by software."
            cpu.state.write(MTVAL, self.trap_value(exception_pc));
            // Set the previous interrupt-enable bit for machine mode (MPIE, 7) to the value
            // of the global interrupt-enable bit for machine mode (MIE, 3).
            // (Bits 7 and 3 of MSTATUS are the machine-mode MPIE/MIE bits, as the
            // writes below show; an earlier comment mislabeled them "supervisor".)
            cpu.state
                .write_bit(MSTATUS, 7, cpu.state.read_bit(MSTATUS, 3));
            // Set the global interrupt-enable bit for machine mode (MIE, 3) to 0.
            cpu.state.write_bit(MSTATUS, 3, 0);
            // When a trap is taken from privilege mode y into privilege mode x, xPIE is set
            // to the value of x IE; x IE is set to 0; and xPP is set to y.
            match previous_mode {
                Mode::User => cpu.state.write_bits(MSTATUS, 11..13, 0b00),
                Mode::Supervisor => cpu.state.write_bits(MSTATUS, 11..13, 0b01),
                Mode::Machine => cpu.state.write_bits(MSTATUS, 11..13, 0b11),
                _ => panic!("previous privilege mode is invalid"),
            }
        }
        // Classify the trap severity for the caller.
        match self {
            Exception::InstructionAddressMisaligned | Exception::InstructionAccessFault => {
                Trap::Fatal
            }
            Exception::IllegalInstruction(_) => Trap::Invisible,
            Exception::Breakpoint => Trap::Requested,
            Exception::LoadAddressMisaligned
            | Exception::LoadAccessFault
            | Exception::StoreAMOAddressMisaligned
            | Exception::StoreAMOAccessFault => Trap::Fatal,
            Exception::EnvironmentCallFromUMode
            | Exception::EnvironmentCallFromSMode
            | Exception::EnvironmentCallFromMMode => Trap::Requested,
            Exception::InstructionPageFault(_)
            | Exception::LoadPageFault(_)
            | Exception::StoreAMOPageFault(_) => Trap::Invisible,
        }
    }
}
| 47.987124 | 99 | 0.625078 |
160c731f8d291c01fc07c3a1b508186d436f8b14 | 9,032 | use crate::{os, util, Error, Region, Result};
/// An iterator over the [`Region`]s that encompass an address range.
///
/// This `struct` is created by [`query_range`]. See its documentation for more.
pub struct QueryIter {
  // Underlying OS-specific iterator; `None` once exhausted or after an error
  // has been yielded (this is what fuses the iterator).
  iterator: Option<os::QueryIter>,
  // Start of the queried range; regions ending at or before it are skipped.
  origin: *const (),
}
impl QueryIter {
  /// Creates an iterator over the regions overlapping `[origin, origin + size)`.
  pub(crate) fn new<T>(origin: *const T, size: usize) -> Result<Self> {
    let origin = origin.cast();
    os::QueryIter::new(origin, size).map(|iterator| Self {
      iterator: Some(iterator),
      origin,
    })
  }
}
impl Iterator for QueryIter {
  type Item = Result<Region>;
  /// Advances the iterator and returns the next region.
  ///
  /// If the iterator has been exhausted (i.e. all [`Region`]s have been
  /// queried), or if an error is encountered during iteration, all further
  /// invocations will return [`None`] (in the case of an error, the error will
  /// be the last item that is yielded before the iterator is fused).
  #[allow(clippy::missing_inline_in_public_items)]
  fn next(&mut self) -> Option<Self::Item> {
    // `None` here means the iterator has already been fused.
    let regions = self.iterator.as_mut()?;
    while let Some(result) = regions.next() {
      match result {
        Ok(region) => {
          let range = region.as_range();
          // Skip the region if it is prior to the queried range
          if range.end <= self.origin as usize {
            continue;
          }
          // Stop iteration if the region is after the queried range
          if range.start >= regions.upper_bound() {
            break;
          }
          return Some(Ok(region));
        }
        Err(error) => {
          // Fuse the iterator so the error is the last item yielded.
          self.iterator.take();
          return Some(Err(error));
        }
      }
    }
    self.iterator.take();
    None
  }
}
impl std::iter::FusedIterator for QueryIter {}
// SAFETY: `QueryIter` stores the raw pointer only as an opaque address (it is
// never dereferenced) plus the OS iterator handle — presumed safe to move
// across threads; TODO confirm this holds for every `os` backend.
unsafe impl Send for QueryIter {}
// SAFETY: same reasoning as `Send`; `&QueryIter` exposes no interior mutability.
unsafe impl Sync for QueryIter {}
/// Queries the OS with an address, returning the region it resides within.
///
/// If the queried address does not reside within any mapped region, or if it's
/// outside the process' address space, the function will error with
/// [`Error::UnmappedRegion`].
///
/// # Parameters
///
/// - The enclosing region can be of multiple page sizes.
/// - The address is rounded down to the closest page boundary.
///
/// # Errors
///
/// - If an interaction with the underlying operating system fails, an error
/// will be returned.
///
/// # Examples
///
/// ```
/// # fn main() -> region::Result<()> {
/// use region::Protection;
///
/// let data = [0; 100];
/// let region = region::query(data.as_ptr())?;
///
/// assert_eq!(region.protection(), Protection::READ_WRITE);
/// # Ok(())
/// # }
/// ```
#[inline]
pub fn query<T>(address: *const T) -> Result<Region> {
// For UNIX systems, the address must be aligned to the closest page boundary
let (address, size) = util::round_to_page_boundaries(address, 1)?;
QueryIter::new(address, size)?
.next()
.ok_or(Error::UnmappedRegion)?
}
/// Queries the OS for mapped regions that overlap with the specified range.
///
/// The implementation clamps any input that exceeds the boundaries of a
/// process' address space. Therefore it's safe to, e.g., pass in
/// [`std::ptr::null`] and [`usize::max_value`] to iterate the mapped memory
/// pages of an entire process.
///
/// If an error is encountered during iteration, the error will be the last item
/// that is yielded. Thereafter the iterator becomes fused.
///
/// A 2-byte range straddling a page boundary, will return both pages (or one
/// region, if the pages share the same properties).
///
/// This function only returns mapped regions. If required, unmapped regions can
/// be manually identified by inspecting the potential gaps between two
/// neighboring regions.
///
/// # Parameters
///
/// - The range is `[address, address + size)`
/// - The address is rounded down to the closest page boundary.
/// - The size may not be zero.
/// - The size is rounded up to the closest page boundary, relative to the
///   address.
///
/// # Errors
///
/// - If an interaction with the underlying operating system fails, an error
///   will be returned.
/// - If size is zero, [`Error::InvalidParameter`] will be returned.
///
/// # Examples
///
/// ```
/// # use region::Result;
/// # fn main() -> Result<()> {
/// let data = [0; 100];
/// let region = region::query_range(data.as_ptr(), data.len())?
///   .collect::<Result<Vec<_>>>()?;
///
/// assert_eq!(region.len(), 1);
/// assert_eq!(region[0].protection(), region::Protection::READ_WRITE);
/// # Ok(())
/// # }
/// ```
#[inline]
pub fn query_range<T>(address: *const T, size: usize) -> Result<QueryIter> {
  // Normalize the range to page boundaries (errors if `size` is zero), then
  // hand the clamped range to the iterator.
  let (start, len) = util::round_to_page_boundaries(address, size)?;
  QueryIter::new(start, len)
}
#[cfg(test)]
mod tests {
  // These tests exercise real OS memory maps, so results are inherently
  // platform-dependent (see the `cfg!(windows)` assertion below).
  use super::*;
  use crate::tests::util::alloc_pages;
  use crate::{page, Protection};
  #[test]
  fn query_returns_unmapped_for_oob_address() {
    let (min, max) = (std::ptr::null::<()>(), usize::max_value() as *const ());
    assert!(matches!(query(min), Err(Error::UnmappedRegion)));
    assert!(matches!(query(max), Err(Error::UnmappedRegion)));
  }
  #[test]
  fn query_returns_correct_descriptor_for_text_segment() -> Result<()> {
    // Use this test function's own address as a known executable location.
    let region = query(query_returns_correct_descriptor_for_text_segment as *const ())?;
    assert_eq!(region.protection(), Protection::READ_EXECUTE);
    assert_eq!(region.is_shared(), cfg!(windows));
    assert!(!region.is_guarded());
    Ok(())
  }
  #[test]
  fn query_returns_one_region_for_multiple_page_allocation() -> Result<()> {
    let alloc = crate::alloc(page::size() + 1, Protection::READ_EXECUTE)?;
    let region = query(alloc.as_ptr::<()>())?;
    assert_eq!(region.protection(), Protection::READ_EXECUTE);
    assert_eq!(region.as_ptr::<()>(), alloc.as_ptr());
    assert_eq!(region.len(), alloc.len());
    assert!(!region.is_guarded());
    Ok(())
  }
  #[test]
  fn query_is_not_off_by_one() -> Result<()> {
    let pages = [Protection::READ, Protection::READ_EXECUTE, Protection::READ];
    let map = alloc_pages(&pages);
    let page_mid = unsafe { map.as_ptr().add(page::size()) };
    let region = query(page_mid)?;
    assert_eq!(region.protection(), Protection::READ_EXECUTE);
    assert_eq!(region.len(), page::size());
    let region = query(unsafe { page_mid.offset(-1) })?;
    assert_eq!(region.protection(), Protection::READ);
    assert_eq!(region.len(), page::size());
    Ok(())
  }
  #[test]
  fn query_range_does_not_return_unmapped_regions() -> Result<()> {
    let regions = query_range(std::ptr::null::<()>(), 1)?.collect::<Result<Vec<_>>>()?;
    assert!(regions.is_empty());
    Ok(())
  }
  #[test]
  fn query_range_returns_both_regions_for_straddling_range() -> Result<()> {
    let pages = [Protection::READ_EXECUTE, Protection::READ_WRITE];
    let map = alloc_pages(&pages);
    // Query an area that overlaps both pages
    let address = unsafe { map.as_ptr().offset(page::size() as isize - 1) };
    let regions = query_range(address, 2)?.collect::<Result<Vec<_>>>()?;
    assert_eq!(regions.len(), pages.len());
    for (page, region) in pages.iter().zip(regions.iter()) {
      assert_eq!(*page, region.protection);
    }
    Ok(())
  }
  #[test]
  fn query_range_has_inclusive_lower_and_exclusive_upper_bound() -> Result<()> {
    let pages = [Protection::READ, Protection::READ_WRITE, Protection::READ];
    let map = alloc_pages(&pages);
    let regions = query_range(map.as_ptr(), page::size())?.collect::<Result<Vec<_>>>()?;
    assert_eq!(regions.len(), 1);
    assert_eq!(regions[0].protection(), Protection::READ);
    let regions = query_range(map.as_ptr(), page::size() + 1)?.collect::<Result<Vec<_>>>()?;
    assert_eq!(regions.len(), 2);
    assert_eq!(regions[0].protection(), Protection::READ);
    assert_eq!(regions[1].protection(), Protection::READ_WRITE);
    Ok(())
  }
  #[test]
  fn query_range_can_iterate_over_entire_process() -> Result<()> {
    let regions =
      query_range(std::ptr::null::<()>(), usize::max_value())?.collect::<Result<Vec<_>>>()?;
    let (r, rw, rx) = (
      Protection::READ,
      Protection::READ_WRITE,
      Protection::READ_EXECUTE,
    );
    // This test is a bit rough around the edges
    assert!(regions.iter().any(|region| region.protection() == r));
    assert!(regions.iter().any(|region| region.protection() == rw));
    assert!(regions.iter().any(|region| region.protection() == rx));
    assert!(regions.len() > 5);
    Ok(())
  }
  #[test]
  fn query_range_iterator_is_fused_after_exhaustion() -> Result<()> {
    let pages = [Protection::READ, Protection::READ_WRITE];
    let map = alloc_pages(&pages);
    let mut iter = query_range(map.as_ptr(), page::size() + 1)?;
    assert_eq!(
      iter.next().transpose()?.map(|r| r.protection()),
      Some(Protection::READ)
    );
    assert_eq!(
      iter.next().transpose()?.map(|r| r.protection()),
      Some(Protection::READ_WRITE)
    );
    assert_eq!(iter.next().transpose()?, None);
    assert_eq!(iter.next().transpose()?, None);
    Ok(())
  }
}
| 31.470383 | 92 | 0.635186 |
deadbab1774c5f60361abccea860731757b70897 | 3,820 | use clap;
use clap::Arg;
use portus;
use slog;
use std;
use time;
use {
Alg, GenericCongAvoidAlg, GenericCongAvoidConfigReport, GenericCongAvoidConfigSS,
DEFAULT_SS_THRESH,
};
/// Builds and runs the clap argument parser for a CCP generic congestion
/// avoidance binary, returning the assembled `Alg` configuration together
/// with the chosen IPC backend name.
///
/// Algorithm-specific arguments are contributed via `A::args()` and consumed
/// by `A::with_args`.
///
/// # Errors
///
/// Returns a `ParseIntError` if `--ss_thresh`, `--init_cwnd`, or
/// `--deficit_timeout` is not a valid `u32`.
pub fn make_args<A: GenericCongAvoidAlg>(
    name: &str,
    logger: impl Into<Option<slog::Logger>>,
) -> Result<(Alg<A>, String), std::num::ParseIntError> {
    let ss_thresh_default = format!("{}", DEFAULT_SS_THRESH);
    let matches = clap::App::new(name)
        .version("0.2.0")
        .author("Akshay Narayan <[email protected]>")
        .about("CCP implementation of a congestion avoidance algorithm")
        .arg(Arg::with_name("ipc")
            .long("ipc")
            .help("Sets the type of ipc to use: (netlink|unix)")
            .default_value("unix")
            .validator(portus::algs::ipc_valid))
        .arg(Arg::with_name("init_cwnd")
            .long("init_cwnd")
            .help("Sets the initial congestion window, in bytes. Setting 0 will use datapath default.")
            .default_value("0"))
        .arg(Arg::with_name("ss_thresh")
            .long("ss_thresh")
            .help("Sets the slow start threshold, in bytes")
            .default_value(&ss_thresh_default))
        .arg(Arg::with_name("ss_in_fold")
            .long("ss_in_fold")
            .help("Implement slow start in the datapath"))
        .arg(Arg::with_name("report_per_ack")
            .long("per_ack")
            .help("Specifies that the datapath should send a measurement upon every ACK"))
        .arg(Arg::with_name("report_per_interval")
            .long("report_interval_ms")
            .short("i")
            .takes_value(true))
        .group(clap::ArgGroup::with_name("interval")
            .args(&["report_per_ack", "report_per_interval"])
            .required(false))
        .arg(Arg::with_name("compensate_update")
            .long("compensate_update")
            .help("Scale the congestion window update during slow start to compensate for reporting delay"))
        .arg(Arg::with_name("deficit_timeout")
            .long("deficit_timeout")
            .default_value("0")
            .help("Number of RTTs to wait after a loss event to allow further CWND reductions. \
                   Default 0 means CWND deficit counting is enforced strictly with no timeout."))
        .args(&A::args())
        .get_matches();
    let ipc = String::from(matches.value_of("ipc").unwrap());
    Ok((
        Alg {
            ss_thresh: u32::from_str_radix(matches.value_of("ss_thresh").unwrap(), 10)?,
            init_cwnd: u32::from_str_radix(matches.value_of("init_cwnd").unwrap(), 10)?,
            // Reporting cadence: per-ACK, fixed interval, or per-RTT (default).
            report_option: if matches.is_present("report_per_ack") {
                GenericCongAvoidConfigReport::Ack
            } else if matches.is_present("report_per_interval") {
                GenericCongAvoidConfigReport::Interval(time::Duration::milliseconds(
                    matches
                        .value_of("report_per_interval")
                        .unwrap()
                        .parse()
                        .unwrap(),
                ))
            } else {
                GenericCongAvoidConfigReport::Rtt
            },
            // Slow start can run in the datapath or be driven from CCP.
            ss: if matches.is_present("ss_in_fold") {
                GenericCongAvoidConfigSS::Datapath
            } else {
                GenericCongAvoidConfigSS::Ccp
            },
            use_compensation: matches.is_present("compensate_update"),
            deficit_timeout: u32::from_str_radix(matches.value_of("deficit_timeout").unwrap(), 10)?,
            logger: logger.into(),
            alg: A::with_args(&matches),
        },
        ipc,
    ))
}
/// Runs the algorithm on the given IPC backend until terminated.
///
/// Panics if the portus runtime fails to start.
pub fn start<A: GenericCongAvoidAlg>(ipc: &str, log: slog::Logger, alg: Alg<A>)
where
    A: 'static,
{
    portus::start!(ipc, Some(log), alg).unwrap()
}
| 39.381443 | 109 | 0.574607 |
62505fa626580b91aa9d028fc3acc2ffbe4547e7 | 873 | use ockam::{Context, Result, Worker};
use serde::{Deserialize, Serialize};
// Worker that replies to each request with the square of the received number.
struct Square;
// Message payload: a single number, (de)serialized for transport.
#[derive(Serialize, Deserialize)]
struct Num(usize);
impl Worker for Square {
    type Message = Num;
    type Context = Context;
    // Squares the incoming number and sends the result back to the "app" worker.
    fn handle_message(&mut self, ctx: &mut Context, msg: Num) -> Result<()> {
        println!("Getting square request for number {}", msg.0);
        ctx.send_message("app", Num(msg.0 * msg.0))
    }
}
// Example entry point: start a node, register the squaring worker, send it one
// number, print the squared reply, then shut the node down.
fn main() {
    let (mut app, mut exe) = ockam::start_node();
    exe.execute(async move {
        app.start_worker("io.ockam.square", Square).unwrap();
        let num = 3;
        app.send_message("io.ockam.square", Num(num)).unwrap();
        // block until it receives a message
        let square = app.receive::<Num>().unwrap();
        println!("App: {} ^ 2 = {}", num, square.0);
        app.stop().unwrap();
    })
    .unwrap();
}
| 24.25 | 77 | 0.587629 |
87df89c64915dea56aae3a6b8b385c524a5bc844 | 1,266 | use crate::v_3_1_1::QoS;
/// SUBACK return code (MQTT 3.1.1 §3.9.3): one of the three
/// "success — maximum granted QoS x" codes, or the failure code 0x80.
#[derive(Clone, Debug, PartialEq)]
pub enum ReturnCode {
    SuccessZero,
    SuccessOne,
    SuccessTwo,
    Failure,
}
impl ReturnCode {
    const ZERO: u8 = 0x00;
    const ONE: u8 = 0x01;
    const TWO: u8 = 0x02;
    const FAILURE: u8 = 0x80;
    /// Parses a wire byte into a return code.
    ///
    /// # Errors
    ///
    /// Returns an `InvalidData` error for any byte other than
    /// 0x00, 0x01, 0x02, or 0x80.
    pub fn try_from(u: u8) -> std::io::Result<Self> {
        match u {
            Self::ZERO => Ok(ReturnCode::SuccessZero),
            Self::ONE => Ok(ReturnCode::SuccessOne),
            Self::TWO => Ok(ReturnCode::SuccessTwo),
            Self::FAILURE => Ok(ReturnCode::Failure),
            _ => Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                format!("Suback payload: return code {} is not acceptable", u),
            )),
        }
    }
    /// Maps a granted QoS level to the matching success return code.
    pub fn from_qos(qos: &QoS) -> Self {
        match *qos {
            QoS::Zero => ReturnCode::SuccessZero,
            // BUGFIX: QoS 1 previously mapped to `SuccessTwo` (0x02), which
            // advertised a higher granted QoS on the wire than was granted.
            QoS::One => ReturnCode::SuccessOne,
            QoS::Two => ReturnCode::SuccessTwo,
        }
    }
    /// Serializes this return code to its wire byte.
    pub fn as_u8(&self) -> u8 {
        match self {
            ReturnCode::SuccessZero => Self::ZERO,
            ReturnCode::SuccessOne => Self::ONE,
            ReturnCode::SuccessTwo => Self::TWO,
            ReturnCode::Failure => Self::FAILURE,
        }
    }
}
| 26.93617 | 79 | 0.523697 |
f9b9728bb4fbe0bddd2ec80d22b69bdc6b2694ab | 2,805 | use serde::de::Deserializer;
use serde::ser::Serializer;
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// Struct representing [payloads](https://discord.com/developers/docs/topics/gateway#payloads)
/// received from the Discord gateway.
#[derive(Deserialize, Serialize, Debug)]
pub struct Payload {
    /// Gateway opcode identifying the payload kind.
    pub op: Op,
    /// Event data; its shape depends on `op` (and on `t` for Dispatch payloads).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub d: Option<Value>,
    /// Event name — per the gateway docs, only present for Dispatch (op 0).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub t: Option<String>,
    /// Sequence number — per the gateway docs, used for heartbeats and resuming.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub s: Option<u64>,
}
impl Payload {
    /// Creates a payload with the given opcode and no data/event/sequence set.
    pub fn op(op: Op) -> Payload {
        Payload {
            op,
            d: None,
            s: None,
            t: None,
        }
    }
    /// Sets the event data (`d`); returns `&mut self` for chaining.
    pub fn d(&mut self, value: Value) -> &mut Self {
        self.d = Some(value);
        self
    }
    /// Sets the event name (`t`); returns `&mut self` for chaining.
    pub fn t<S: Into<String>>(&mut self, t: S) -> &mut Self {
        self.t = Some(t.into());
        self
    }
    /// Sets the sequence number (`s`); returns `&mut self` for chaining.
    pub fn s(&mut self, s: u64) -> &mut Self {
        self.s = Some(s);
        self
    }
}
/// [Opcode](https://discord.com/developers/docs/topics/opcodes-and-status-codes#gateway-opcodes)
/// as will be received in a [`Payload`](Payload) received from Discord.
#[derive(Clone, Copy, Debug)]
pub enum Op {
    Dispatch,
    Heartbeat,
    Identify,
    PresenceUpdate,
    VoiceStateUpdate,
    Resume,
    Reconnect,
    RequestGuildMembers,
    InvalidSession,
    Hello,
    HeartbeatAck,
    Unknown(u8),
}
impl From<u8> for Op {
    /// Decodes a wire opcode; any unassigned value (including 5, which has no
    /// variant here) becomes [`Op::Unknown`].
    fn from(op: u8) -> Self {
        match op {
            0 => Op::Dispatch,
            1 => Op::Heartbeat,
            2 => Op::Identify,
            3 => Op::PresenceUpdate,
            4 => Op::VoiceStateUpdate,
            6 => Op::Resume,
            7 => Op::Reconnect,
            8 => Op::RequestGuildMembers,
            9 => Op::InvalidSession,
            10 => Op::Hello,
            11 => Op::HeartbeatAck,
            other => Op::Unknown(other),
        }
    }
}
impl From<Op> for u8 {
    /// Encodes an opcode back to its wire value (inverse of `From<u8>`).
    fn from(op: Op) -> Self {
        match op {
            Op::Dispatch => 0,
            Op::Heartbeat => 1,
            Op::Identify => 2,
            Op::PresenceUpdate => 3,
            Op::VoiceStateUpdate => 4,
            Op::Resume => 6,
            Op::Reconnect => 7,
            Op::RequestGuildMembers => 8,
            Op::InvalidSession => 9,
            Op::Hello => 10,
            Op::HeartbeatAck => 11,
            Op::Unknown(raw) => raw,
        }
    }
}
impl Serialize for Op {
    // Serialize as the raw numeric opcode rather than as a variant name.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_u8((*self).into())
    }
}
impl<'a> Deserialize<'a> for Op {
    // Deserialize from the raw numeric opcode; unknown values land in
    // `Op::Unknown` via the `From<u8>` conversion, so this cannot fail on
    // out-of-range opcodes.
    fn deserialize<D>(deserializer: D) -> Result<Op, D::Error>
    where
        D: Deserializer<'a>,
    {
        Ok(u8::deserialize(deserializer)?.into())
    }
}
| 22.804878 | 97 | 0.521925 |
ed3b860ef73ef1ed2cd7a9e282f016a87769e0f1 | 29 | mod state;
pub use state::*; | 9.666667 | 17 | 0.655172 |
229c50e26a3882f42f2b16492980362ddb12bc88 | 1,622 | use std::str::from_utf8;
use std::{env, error::Error, fs, process::exit};
use watson::*;
/// Interprets the given WASM program starting at its `main` export, driving
/// the executor unit-by-unit until completion; returns the values `main`
/// produced.
///
/// Host imports handled: `output_byte` prints the byte as UTF-8 text;
/// `import_byte` is currently a stub; anything else panics.
fn run(program: impl InterpretableProgram) -> Result<Vec<WasmValue>, &'static str> {
    let mut interpreter = Interpreter::new(program)?;
    let mut executor = interpreter.call("main", &[])?;
    loop {
        let execution_unit = executor.next_unit()?;
        let response = match execution_unit {
            // if an import is called, figure out what to do
            ExecutionUnit::CallImport(x) => {
                if x.name == "output_byte" {
                    let b = x.params[0].to_i32() as u8;
                    let chars = [b];
                    let text = from_utf8(&chars).unwrap();
                    // NOTE(review): `println!` appends a newline after every
                    // single byte; `print!` may be what is intended here —
                    // confirm the expected output format.
                    println!("{}", text);
                    ExecutionResponse::DoNothing
                } else if x.name == "import_byte" {
                    // TODO
                    ExecutionResponse::DoNothing
                } else {
                    panic!("unknown import call")
                }
            }
            // if there's nothing left to do, break out of loop
            ExecutionUnit::Complete(v) => break Ok(v),
            // handle other execution units with default behavior
            mut x @ _ => x.evaluate()?,
        };
        executor.execute(response)?;
    }
}
/// Entry point: `bfi <app.wasm>` — reads and parses the given WASM file, then
/// runs it. Prints usage and exits non-zero on a wrong argument count.
fn main() -> Result<(), Box<dyn Error>> {
    let args: Vec<String> = env::args().collect();
    if args.len() == 2 {
        let buffer = fs::read(&args[1])?;
        let program = watson::parse(&buffer)?;
        run(program)?;
    } else {
        eprintln!("bfi <app.wasm>");
        exit(1);
    }
    Ok(())
}
| 34.510638 | 84 | 0.49815 |
b9522b5a890b2f7a6ce3696c0b2240416f16d716 | 124 | mod challenge;
mod inputs;
pub use challenge::{all_challenges, Challenge, Example};
pub use inputs::{GroupedLines, Lines};
| 20.666667 | 56 | 0.766129 |
48de0fc7b6bac85da87f15894177dc9486356986 | 3,234 | // Copyright 2016-2020 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate libc;
/// darwin_fd_limit exists to work around an issue where launchctl on Mac OS X
/// defaults the rlimit maxfiles to 256/unlimited. The default soft limit of 256
/// ends up being far too low for our multithreaded scheduler testing, depending
/// on the number of cores available.
#[cfg(any(target_os = "macos", target_os = "ios"))]
#[allow(non_camel_case_types)]
pub fn raise_fd_limit() {
    use std::cmp;
    use std::io;
    use std::mem::size_of_val;
    use std::ptr::null_mut;
    // SAFETY: every pointer handed to sysctl/getrlimit/setrlimit below refers
    // to a live, properly sized stack variable, and `size` is initialized to
    // the byte size of `maxfiles` as sysctl requires.
    unsafe {
        static CTL_KERN: libc::c_int = 1;
        static KERN_MAXFILESPERPROC: libc::c_int = 29;
        // The strategy here is to fetch the current resource limits, read the
        // kern.maxfilesperproc sysctl value, and bump the soft resource limit for
        // maxfiles up to the sysctl value.
        // Fetch the kern.maxfilesperproc value
        let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC];
        let mut maxfiles: libc::c_int = 0;
        let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
        if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size,
                        null_mut(), 0) != 0 {
            let err = io::Error::last_os_error();
            panic!("raise_fd_limit: error calling sysctl: {}", err);
        }
        // Fetch the current resource limits
        let mut rlim = libc::rlimit{rlim_cur: 0, rlim_max: 0};
        if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
            let err = io::Error::last_os_error();
            panic!("raise_fd_limit: error calling getrlimit: {}", err);
        }
        // Bump the soft limit to the smaller of kern.maxfilesperproc and the hard
        // limit
        rlim.rlim_cur = cmp::min(maxfiles as libc::rlim_t, rlim.rlim_max);
        // Set our newly-increased resource limit
        if libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) != 0 {
            let err = io::Error::last_os_error();
            panic!("raise_fd_limit: error calling setrlimit: {}", err);
        }
    }
}
// On Linux there is no sysctl step: simply raise the soft NOFILE limit to the
// hard limit.
#[cfg(any(target_os = "linux"))]
#[allow(non_camel_case_types)]
pub fn raise_fd_limit() {
    use libc;
    use std::io;
    // SAFETY: `rlim` is a live, properly sized stack value for both libc calls.
    unsafe {
        // Fetch the current resource limits
        let mut rlim = libc::rlimit{rlim_cur: 0, rlim_max: 0};
        if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
            let err = io::Error::last_os_error();
            panic!("raise_fd_limit: error calling getrlimit: {}", err);
        }
        // Set soft limit to hard limit
        rlim.rlim_cur = rlim.rlim_max;
        // Set our newly-increased resource limit
        if libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) != 0 {
            let err = io::Error::last_os_error();
            panic!("raise_fd_limit: error calling setrlimit: {}", err);
        }
    }
}
// No-op on platforms where the fd limit is not adjusted.
#[cfg(not(any(target_os = "macos", target_os = "ios", target_os = "linux")))]
pub fn raise_fd_limit() {}
| 34.774194 | 80 | 0.695114 |
fe578f51b63a0e4280b580764c0458b5eb7f5139 | 406 | #![feature(type_alias_impl_trait)]
#![deny(improper_ctypes)]
pub trait Baz {}
impl Baz for () {}
type Qux = impl Baz;
fn assign() -> Qux {}
pub trait Foo {
    type Assoc: 'static;
}
impl Foo for () {
    type Assoc = Qux;
}
#[repr(transparent)]
pub struct A<T: Foo> {
    x: &'static <T as Foo>::Assoc,
}
extern "C" {
    pub fn lint_me() -> A<()>; //~ ERROR: uses type `impl Baz`
}
fn main() {}
// NOTE: this is a rustc UI test for the `improper_ctypes` lint firing on an
// opaque type (`impl Baz`) reached through an associated type in an FFI
// signature; the `//~ ERROR` annotation above marks the expected diagnostic.
// Comments are appended here rather than inserted above so the diagnostic
// spans recorded in the expected stderr file stay unchanged.
| 13.533333 | 62 | 0.576355 |
2f6835ccf8dfe56a6f23e66050d8ed46dc730582 | 5,675 | use std::borrow::Cow;
use amethyst_core::{
ecs::{DispatcherBuilder, Resources, SystemBundle, World},
math::one,
shrev::EventChannel,
};
use amethyst_error::Error;
use winit::event::Event;
use super::{
ArcBallRotationSystem, CursorHideSystem, FlyMovementSystem, FreeRotationSystem, HideCursor,
MouseFocusUpdateSystem, WindowFocus,
};
/// The bundle that creates a flying movement system.
///
/// Note: Will not actually create a moving entity. It will only register the needed resources and
/// systems.
///
/// You might want to add `"fly_movement"` and `"free_rotation"` as dependencies of the
/// `TransformSystem` in order to apply changes made by these systems in the same frame.
/// Adding this bundle will grab the mouse, hide it and keep it centered.
///
/// # Systems
///
/// This bundle adds the following systems:
///
/// * `FlyMovementSystem`
/// * `FreeRotationSystem`
/// * `MouseFocusUpdateSystem`
/// * `CursorHideSystem`
#[derive(Debug)]
pub struct FlyControlBundle {
    /// Horizontal mouse sensitivity passed to `FreeRotationSystem` (default 1.0).
    sensitivity_x: f32,
    /// Vertical mouse sensitivity passed to `FreeRotationSystem` (default 1.0).
    sensitivity_y: f32,
    /// Movement speed passed to `FlyMovementSystem`.
    speed: f32,
    /// Input axis names handed to `FlyMovementSystem`; presumably `None`
    /// disables the corresponding direction — confirm in `FlyMovementSystem`.
    horizontal_axis: Option<Cow<'static, str>>,
    vertical_axis: Option<Cow<'static, str>>,
    longitudinal_axis: Option<Cow<'static, str>>,
}
impl FlyControlBundle {
    /// Builds a new fly control bundle using the provided axes as controls.
    ///
    /// Sensitivity defaults to 1.0 on both axes and speed to `one()`.
    #[must_use]
    pub fn new(
        horizontal_axis: Option<Cow<'static, str>>,
        vertical_axis: Option<Cow<'static, str>>,
        longitudinal_axis: Option<Cow<'static, str>>,
    ) -> Self {
        FlyControlBundle {
            sensitivity_x: 1.0,
            sensitivity_y: 1.0,
            speed: one(),
            horizontal_axis,
            vertical_axis,
            longitudinal_axis,
        }
    }
    /// Alters the mouse sensitivity on this `FlyControlBundle`.
    #[must_use]
    pub fn with_sensitivity(mut self, x: f32, y: f32) -> Self {
        self.sensitivity_x = x;
        self.sensitivity_y = y;
        self
    }
    /// Alters the speed on this `FlyControlBundle`.
    #[must_use]
    pub fn with_speed(mut self, speed: f32) -> Self {
        self.speed = speed;
        self
    }
}
impl SystemBundle for FlyControlBundle {
    /// Registers the fly-camera systems and the resources they share.
    fn load(
        &mut self,
        _world: &mut World,
        resources: &mut Resources,
        builder: &mut DispatcherBuilder,
    ) -> Result<(), Error> {
        builder.add_system(FlyMovementSystem {
            speed: self.speed,
            horizontal_axis: self.horizontal_axis.clone(),
            vertical_axis: self.vertical_axis.clone(),
            longitudinal_axis: self.longitudinal_axis.clone(),
        });
        // Each system consuming window events registers its own channel reader.
        let reader = resources
            .get_mut::<EventChannel<Event<'static, ()>>>()
            .expect("Window event channel not found in resources")
            .register_reader();
        builder.add_system(FreeRotationSystem {
            sensitivity_x: self.sensitivity_x,
            sensitivity_y: self.sensitivity_y,
            reader,
        });
        resources.insert(WindowFocus::new());
        let reader = resources
            .get_mut::<EventChannel<Event<'static, ()>>>()
            .expect("Window event channel not found in resources")
            .register_reader();
        builder.add_system(MouseFocusUpdateSystem { reader });
        resources.insert(HideCursor::default());
        // Registered as a thread-local system — presumably because it touches
        // the window's cursor; confirm in `CursorHideSystem`.
        builder.add_thread_local(CursorHideSystem);
        Ok(())
    }
}
/// The bundle that creates an arc ball movement system.
/// Note: Will not actually create a moving entity. It will only register the needed resources and systems.
/// You might want to add `fly_movement` and `free_rotation` as dependencies of the `TransformSystem`.
/// Adding this bundle will grab the mouse, hide it and keep it centered.
///
/// See the `arc_ball_camera` example to see how to use the arc ball camera.
#[derive(Debug)]
pub struct ArcBallControlBundle {
    /// Horizontal mouse sensitivity (default 1.0).
    sensitivity_x: f32,
    /// Vertical mouse sensitivity (default 1.0).
    sensitivity_y: f32,
}
impl ArcBallControlBundle {
    /// Builds a new `ArcBallControlBundle` with a default sensitivity of 1.0
    #[must_use]
    pub fn new() -> Self {
        ArcBallControlBundle {
            sensitivity_x: 1.0,
            sensitivity_y: 1.0,
        }
    }
    /// Builds a new `ArcBallControlBundle` with the provided mouse sensitivity values.
    #[must_use]
    pub fn with_sensitivity(mut self, x: f32, y: f32) -> Self {
        self.sensitivity_x = x;
        self.sensitivity_y = y;
        self
    }
}
impl Default for ArcBallControlBundle {
    /// Equivalent to [`ArcBallControlBundle::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl SystemBundle for ArcBallControlBundle {
    /// Registers the arc-ball camera systems and the resources they share.
    fn load(
        &mut self,
        _world: &mut World,
        resources: &mut Resources,
        builder: &mut DispatcherBuilder,
    ) -> Result<(), Error> {
        // Each system consuming window events registers its own channel reader.
        let reader = resources
            .get_mut::<EventChannel<Event<'static, ()>>>()
            .expect("Window event channel not found in resources")
            .register_reader();
        builder.add_system(FreeRotationSystem {
            sensitivity_x: self.sensitivity_x,
            sensitivity_y: self.sensitivity_y,
            reader,
        });
        builder.add_system(ArcBallRotationSystem);
        resources.insert(WindowFocus::new());
        let reader = resources
            .get_mut::<EventChannel<Event<'static, ()>>>()
            .expect("Window event channel not found in resources")
            .register_reader();
        builder.add_system(MouseFocusUpdateSystem { reader });
        resources.insert(HideCursor::default());
        // Registered as a thread-local system — presumably because it touches
        // the window's cursor; confirm in `CursorHideSystem`.
        builder.add_thread_local(CursorHideSystem);
        Ok(())
    }
}
| 29.712042 | 107 | 0.627841 |
75c01c99b5c8fcf9af64ab48e67a3914384a46d5 | 1,279 | //! Literal string model.
//! リテラル文字列モデル。(Japanese: "Literal string model.")
//!
//! # Examples
//!
//! ```
//! // work_number
//! ```
use crate::model::{layer110::Token, layer210::Key};
use std::fmt;
impl Default for Key {
    /// An empty key: no tokens.
    fn default() -> Self {
        Key { tokens: Vec::new() }
    }
}
impl Key {
    /// Builds a `Key` holding a single (cloned) token.
    pub fn from_token(token: &Token) -> Self {
        let mut m = Key::default();
        m.push_token(token);
        m
    }
    /// Appends clones of all given tokens.
    ///
    /// Accepts any slice of tokens; existing `&Vec<Token>` call sites still
    /// work via deref coercion, and the whole-Vec clone is no longer needed.
    pub fn extend_tokens(&mut self, tokens: &[Token]) {
        self.tokens.extend_from_slice(tokens);
    }
    /// Appends a clone of one token.
    pub fn push_token(&mut self, token: &Token) {
        self.tokens.push(token.clone());
    }
    /// Debug-formats the key; same as `format!("{:?}", key)`.
    pub fn to_debug_string(&self) -> String {
        format!("{:?}", self)
    }
    /// Display-formats the key; same as `format!("{}", key)`.
    /// (Kept despite shadowing `ToString::to_string` for API compatibility.)
    pub fn to_string(&self) -> String {
        format!("{}", self)
    }
}
impl fmt::Display for Key {
    /// Writes the `Display` form of every token, in order, with no separators.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.tokens
            .iter()
            .try_for_each(|token| write!(f, "{}", token))
    }
}
impl fmt::Debug for Key {
    /// Writes the debug form of every token, in order, with no separators.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.tokens
            .iter()
            .try_for_each(|token| f.write_str(&token.to_debug_string()))
    }
}
| 22.839286 | 58 | 0.516028 |
0e240477e9484fc2529b8fde472cdfc87cdc9a70 | 10,361 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/vhdirk/gir-files)
// DO NOT EDIT
use crate::Cookie;
#[cfg(any(feature = "v2_30", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
use crate::CookieJarAcceptPolicy;
#[cfg(any(feature = "v2_24", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
use crate::SessionFeature;
#[cfg(any(feature = "v2_24", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
use crate::URI;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
// Generated GObject wrapper. With the v2_24 API the wrapper also advertises
// the `SessionFeature` interface implemented by `SoupCookieJar`.
#[cfg(any(feature = "v2_24", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
glib::wrapper! {
    #[doc(alias = "SoupCookieJar")]
    pub struct CookieJar(Object<ffi::SoupCookieJar, ffi::SoupCookieJarClass>) @implements SessionFeature;

    match fn {
        type_ => || ffi::soup_cookie_jar_get_type(),
    }
}
// Fallback wrapper: without v2_24 the `SessionFeature` type is not available,
// so the wrapper is declared without the `@implements` clause.
#[cfg(not(any(feature = "v2_24", feature = "dox")))]
glib::wrapper! {
    #[doc(alias = "SoupCookieJar")]
    pub struct CookieJar(Object<ffi::SoupCookieJar, ffi::SoupCookieJarClass>);

    match fn {
        type_ => || ffi::soup_cookie_jar_get_type(),
    }
}
impl CookieJar {
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    #[doc(alias = "soup_cookie_jar_new")]
    // Creates a new cookie jar via the C function `soup_cookie_jar_new()`.
    // `assert_initialized_main_thread!` enforces the library's threading
    // precondition before touching FFI; `from_glib_full` takes ownership of
    // the returned reference.
    pub fn new() -> CookieJar {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(ffi::soup_cookie_jar_new())
        }
    }
}
// `Default` simply delegates to `new()`, so it is gated on the same feature.
#[cfg(any(feature = "v2_24", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
impl Default for CookieJar {
    fn default() -> Self {
        Self::new()
    }
}
// Convenience `None` constant for APIs that accept an `Option<&CookieJar>`.
pub const NONE_COOKIE_JAR: Option<&CookieJar> = None;
/// Trait containing the `CookieJar` methods; blanket-implemented below for
/// every type that is `IsA<CookieJar>`. Each method mirrors one
/// `soup_cookie_jar_*` C function (see the `doc(alias)` attributes).
pub trait CookieJarExt: 'static {
    #[cfg(any(feature = "v2_26", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_26")))]
    #[doc(alias = "soup_cookie_jar_all_cookies")]
    fn all_cookies(&self) -> Vec<Cookie>;
    #[cfg(any(feature = "v2_26", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_26")))]
    #[doc(alias = "soup_cookie_jar_delete_cookie")]
    fn delete_cookie(&self, cookie: &mut Cookie);
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    #[doc(alias = "soup_cookie_jar_get_accept_policy")]
    #[doc(alias = "get_accept_policy")]
    fn accept_policy(&self) -> CookieJarAcceptPolicy;
    #[cfg(any(feature = "v2_40", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_40")))]
    #[doc(alias = "soup_cookie_jar_get_cookie_list")]
    #[doc(alias = "get_cookie_list")]
    fn cookie_list(&self, uri: &mut URI, for_http: bool) -> Vec<Cookie>;
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    #[doc(alias = "soup_cookie_jar_get_cookies")]
    #[doc(alias = "get_cookies")]
    fn cookies(&self, uri: &mut URI, for_http: bool) -> Option<glib::GString>;
    #[cfg(any(feature = "v2_40", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_40")))]
    #[doc(alias = "soup_cookie_jar_is_persistent")]
    fn is_persistent(&self) -> bool;
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    #[doc(alias = "soup_cookie_jar_save")]
    fn save(&self);
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    #[doc(alias = "soup_cookie_jar_set_accept_policy")]
    fn set_accept_policy(&self, policy: CookieJarAcceptPolicy);
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    #[doc(alias = "soup_cookie_jar_set_cookie")]
    fn set_cookie(&self, uri: &mut URI, cookie: &str);
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    #[doc(alias = "soup_cookie_jar_set_cookie_with_first_party")]
    fn set_cookie_with_first_party(&self, uri: &mut URI, first_party: &mut URI, cookie: &str);
    // Reads the `read-only` GObject property (no dedicated C getter is bound).
    #[doc(alias = "read-only")]
    fn is_read_only(&self) -> bool;
    // Connects to the `changed` signal: (jar, old_cookie, new_cookie).
    #[doc(alias = "changed")]
    fn connect_changed<F: Fn(&Self, &Cookie, &Cookie) + 'static>(&self, f: F) -> SignalHandlerId;
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    #[doc(alias = "accept-policy")]
    fn connect_accept_policy_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
// Blanket implementation: each method forwards to the corresponding
// `soup_cookie_jar_*` C function through the generated FFI bindings.
// `to_glib_none()` / `to_glib_none_mut()` borrow the values for the duration
// of the call; `from_glib_full` / `FromGlibPtrContainer::from_glib_full`
// take ownership of returned values.
impl<O: IsA<CookieJar>> CookieJarExt for O {
    #[cfg(any(feature = "v2_26", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_26")))]
    fn all_cookies(&self) -> Vec<Cookie> {
        unsafe {
            FromGlibPtrContainer::from_glib_full(ffi::soup_cookie_jar_all_cookies(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_26", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_26")))]
    fn delete_cookie(&self, cookie: &mut Cookie) {
        unsafe {
            ffi::soup_cookie_jar_delete_cookie(self.as_ref().to_glib_none().0, cookie.to_glib_none_mut().0);
        }
    }
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    fn accept_policy(&self) -> CookieJarAcceptPolicy {
        unsafe {
            from_glib(ffi::soup_cookie_jar_get_accept_policy(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_40", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_40")))]
    fn cookie_list(&self, uri: &mut URI, for_http: bool) -> Vec<Cookie> {
        unsafe {
            FromGlibPtrContainer::from_glib_full(ffi::soup_cookie_jar_get_cookie_list(self.as_ref().to_glib_none().0, uri.to_glib_none_mut().0, for_http.into_glib()))
        }
    }
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    fn cookies(&self, uri: &mut URI, for_http: bool) -> Option<glib::GString> {
        unsafe {
            from_glib_full(ffi::soup_cookie_jar_get_cookies(self.as_ref().to_glib_none().0, uri.to_glib_none_mut().0, for_http.into_glib()))
        }
    }
    #[cfg(any(feature = "v2_40", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_40")))]
    fn is_persistent(&self) -> bool {
        unsafe {
            from_glib(ffi::soup_cookie_jar_is_persistent(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    fn save(&self) {
        unsafe {
            ffi::soup_cookie_jar_save(self.as_ref().to_glib_none().0);
        }
    }
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    fn set_accept_policy(&self, policy: CookieJarAcceptPolicy) {
        unsafe {
            ffi::soup_cookie_jar_set_accept_policy(self.as_ref().to_glib_none().0, policy.into_glib());
        }
    }
    #[cfg(any(feature = "v2_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_24")))]
    fn set_cookie(&self, uri: &mut URI, cookie: &str) {
        unsafe {
            ffi::soup_cookie_jar_set_cookie(self.as_ref().to_glib_none().0, uri.to_glib_none_mut().0, cookie.to_glib_none().0);
        }
    }
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    fn set_cookie_with_first_party(&self, uri: &mut URI, first_party: &mut URI, cookie: &str) {
        unsafe {
            ffi::soup_cookie_jar_set_cookie_with_first_party(self.as_ref().to_glib_none().0, uri.to_glib_none_mut().0, first_party.to_glib_none_mut().0, cookie.to_glib_none().0);
        }
    }
    fn is_read_only(&self) -> bool {
        // Reads the `read-only` GObject property via g_object_get_property.
        unsafe {
            let mut value = glib::Value::from_type(<bool as StaticType>::static_type());
            glib::gobject_ffi::g_object_get_property(self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"read-only\0".as_ptr() as *const _, value.to_glib_none_mut().0);
            value.get().expect("Return Value for property `read-only` getter")
        }
    }
    fn connect_changed<F: Fn(&Self, &Cookie, &Cookie) + 'static>(&self, f: F) -> SignalHandlerId {
        // The trampoline converts raw C callback arguments back into safe
        // wrappers before invoking the boxed Rust closure.
        unsafe extern "C" fn changed_trampoline<P: IsA<CookieJar>, F: Fn(&P, &Cookie, &Cookie) + 'static>(this: *mut ffi::SoupCookieJar, old_cookie: *mut ffi::SoupCookie, new_cookie: *mut ffi::SoupCookie, f: glib::ffi::gpointer) {
            let f: &F = &*(f as *const F);
            f(CookieJar::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(old_cookie), &from_glib_borrow(new_cookie))
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"changed\0".as_ptr() as *const _,
                Some(transmute::<_, unsafe extern "C" fn()>(changed_trampoline::<Self, F> as *const ())), Box_::into_raw(f))
        }
    }
    #[cfg(any(feature = "v2_30", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_30")))]
    fn connect_accept_policy_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe extern "C" fn notify_accept_policy_trampoline<P: IsA<CookieJar>, F: Fn(&P) + 'static>(this: *mut ffi::SoupCookieJar, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer) {
            let f: &F = &*(f as *const F);
            f(CookieJar::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"notify::accept-policy\0".as_ptr() as *const _,
                Some(transmute::<_, unsafe extern "C" fn()>(notify_accept_policy_trampoline::<Self, F> as *const ())), Box_::into_raw(f))
        }
    }
}
impl fmt::Display for CookieJar {
    /// Fixed type name, same output as before.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "CookieJar")
    }
}
| 40.631373 | 230 | 0.609111 |
01c0cb59b4f171742a45f12983400254d2348cbd | 15,461 | //! Split a buffer into multiple smaller buffers.
//!
//! I expect this will be very common when analyzing files - for example,
//! splitting a PE file into its segments.
//!
//! At the core, this uses a vector of `Split` instances. Each split has a
//! starting address, a name, and optionally a new base address.
//!
//! Importantly, the splitting performed here is contiguous - there can't be
//! empty spots. This means that the split can be two-way - the split-up
//! buffers can be collapsed back to the original buffer for export.
use redo::Command;
use serde::{Serialize, Deserialize};
use simple_error::{SimpleResult, SimpleError, bail};
use std::cmp::Ord;
use std::collections::HashMap;
use crate::project::h2project::H2Project;
use crate::project::h2buffer::H2Buffer;
// One target buffer of a split: where it starts in the original buffer, what
// the new buffer is called, and (optionally) an explicit new base address.
#[derive(Serialize, Deserialize, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
pub struct Split {
    // Offset into the original buffer. Kept as the first field so the
    // derived `Ord` sorts splits by their start offset.
    pub start: usize, // Must be first!
    pub new_name: String,
    // If `None`, the new buffer's base becomes `original_base + start`.
    pub new_base: Option<usize>,
}
// splits are (offset, name, base_address)
// Instructions for applying the action: which buffer to split and how.
#[derive(Serialize, Deserialize, Debug)]
pub struct ActionBufferSplitForward {
    // Name of the buffer to split.
    pub name: String,
    pub splits: Vec<Split>,
}
// State captured by `apply()` so that `undo()` can remove the split buffers
// and restore the original one.
#[derive(Serialize, Deserialize, Debug)]
struct ActionBufferSplitBackward {
    original_name: String,
    original_buffer: H2Buffer,
    // The sorted splits, so `undo()` knows which buffers to remove.
    splits: Vec<Split>,
}
// A two-way (redo/undo) action. Exactly one of `forward` / `backward` is
// `Some` at a time, depending on whether the action is pending or applied.
#[derive(Serialize, Deserialize, Debug)]
pub struct ActionBufferSplit {
    forward: Option<ActionBufferSplitForward>,
    backward: Option<ActionBufferSplitBackward>,
}
impl ActionBufferSplit {
pub fn new(forward: ActionBufferSplitForward) -> Self {
ActionBufferSplit {
forward: Some(forward),
backward: None,
}
}
}
impl From<(&str, Vec<Split>)> for ActionBufferSplit {
    /// Convenience conversion from a `(buffer_name, splits)` pair.
    fn from(o: (&str, Vec<Split>)) -> Self {
        let (name, splits) = o;
        Self::new(ActionBufferSplitForward {
            name: name.to_string(),
            splits,
        })
    }
}
impl Command for ActionBufferSplit {
    type Target = H2Project;
    type Error = SimpleError;

    /// Split the buffer named in the forward instructions into new buffers.
    ///
    /// All validation (buffer exists and is unpopulated, at least one split,
    /// first split at offset 0, no duplicate or pre-existing names, offsets
    /// in range) happens before the project is mutated; the one fallible step
    /// after mutation starts has a best-effort rollback.
    fn apply(&mut self, project: &mut H2Project) -> SimpleResult<()> {
        // Get the forward instructions
        let forward = match &self.forward {
            Some(f) => f,
            None => bail!("Failed to apply: missing context"),
        };
        // Get an immutable handle to the buffer for sanity checks
        let buffer = project.get_buffer(&forward.name)?;
        if buffer.is_populated() {
            bail!("Can't split buffer: it's populated");
        }
        // Make sure at least one split was requested
        // (`is_empty()` instead of the clippy-flagged `len() < 1`)
        if forward.splits.is_empty() {
            bail!("Must have at least one split");
        }
        // Sort the splits - `Split`'s derived `Ord` sorts by `start` first
        let mut sorted_splits = forward.splits.clone();
        sorted_splits.sort();
        // The splits must cover the buffer from the very beginning
        match sorted_splits.first() {
            Some(first) => {
                if first.start != 0 {
                    bail!("First split must start at 0");
                }
            },
            None => bail!("Must have at least one split"),
        };
        // Create the buffers - this will allow us to validate before we start
        // changing things around
        let mut buffers: HashMap<String, H2Buffer> = HashMap::new();
        for (index, split) in sorted_splits.iter().enumerate() {
            // Check if such a buffer exists
            if project.buffer_exists(&split.new_name) {
                bail!("Buffer already exists in project: {}", split.new_name);
            }
            // Check for duplicate name
            if buffers.contains_key(&split.new_name) {
                bail!("Duplicate name: {}", split.new_name);
            }
            // Each buffer runs up to the next split's start, or to the end of
            // the original buffer for the last split
            let start = split.start;
            let end = match sorted_splits.get(index + 1) {
                Some(s) => s.start,
                None => buffer.len(),
            };
            if end > buffer.len() {
                bail!("Split is too large");
            }
            // Create the new buffer
            let data = Vec::from(&buffer.data[start..end]); // The subset of data
            let base_address = match split.new_base {
                Some(b) => b,
                None => buffer.base_address + start,
            };
            buffers.insert(split.new_name.clone(), H2Buffer::new(data, base_address)?);
        }
        // This will insert all or nothing so we don't end up in a half-way state
        project.buffer_insert_multiple(buffers)?;
        // This shouldn't fail, but if it does, we need to do our best to back
        // out the change.
        let original_buffer = match project.buffer_remove(&forward.name) {
            Ok(b) => b,
            Err(e) => {
                // Try and fix what we've done
                for split in &forward.splits {
                    // Ignore errors here
                    project.buffer_remove(&split.new_name).ok();
                }
                return Err(e);
            },
        };
        // Populate backward for undo (field-init shorthand instead of the
        // redundant `original_buffer: original_buffer`)
        self.backward = Some(ActionBufferSplitBackward {
            original_name: forward.name.clone(),
            original_buffer,
            splits: sorted_splits,
        });
        self.forward = None;
        Ok(())
    }

    /// Remove the split buffers and restore the original buffer.
    fn undo(&mut self, project: &mut H2Project) -> SimpleResult<()> {
        let backward = match &self.backward {
            Some(b) => b,
            None => bail!("Failed to undo: missing context"),
        };
        // Remove the split buffers
        for split in &backward.splits {
            project.buffer_remove(&split.new_name)?;
        }
        // Restore the original buffer
        // We have to clone to avoid putting things in a potentially bad state
        project.buffer_insert(&backward.original_name, backward.original_buffer.clone_shallow(None)?)?;
        // Save the action back to forward
        self.forward = Some(ActionBufferSplitForward {
            name: backward.original_name.clone(),
            splits: backward.splits.clone(),
        });
        self.backward = None;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    use redo::Record;
    use simple_error::SimpleResult;
    use crate::actions::Action;
    use crate::project::h2project::H2Project;
    // Happy path: split, verify the new buffers (data + base addresses,
    // including a custom base), then undo and redo.
    #[test]
    fn test_action() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Create a buffer
        record.apply(Action::buffer_create_from_bytes("buffer", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        // Split it
        record.apply(Action::buffer_split("buffer", vec![
            Split { new_name: "e".to_string(), start: 4, new_base: None },
            Split { new_name: "f".to_string(), start: 5, new_base: None },
            Split { new_name: "g".to_string(), start: 6, new_base: None },
            Split { new_name: "p".to_string(), start: 15, new_base: Some(1234) },
            // Put the first one last to ensure it sorts
            Split { new_name: "z".to_string(), start: 0, new_base: None },
        ]))?;
        assert_eq!(false, record.target().buffer_exists("buffer"));
        assert_eq!(true, record.target().buffer_exists("z"));
        assert_eq!(b"ABCD".to_vec(), record.target().get_buffer("z")?.data);
        assert_eq!(0x80000000, record.target().get_buffer("z")?.base_address);
        assert_eq!(true, record.target().buffer_exists("e"));
        assert_eq!(b"E".to_vec(), record.target().get_buffer("e")?.data);
        assert_eq!(0x80000004, record.target().get_buffer("e")?.base_address);
        assert_eq!(true, record.target().buffer_exists("f"));
        assert_eq!(b"F".to_vec(), record.target().get_buffer("f")?.data);
        assert_eq!(0x80000005, record.target().get_buffer("f")?.base_address);
        assert_eq!(true, record.target().buffer_exists("g"));
        assert_eq!(b"GHIJKLMNO".to_vec(), record.target().get_buffer("g")?.data);
        assert_eq!(0x80000006, record.target().get_buffer("g")?.base_address);
        assert_eq!(true, record.target().buffer_exists("p"));
        assert_eq!(b"P".to_vec(), record.target().get_buffer("p")?.data);
        // This base_address was customized in the split
        assert_eq!(1234, record.target().get_buffer("p")?.base_address);
        // Undo
        record.undo()?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        assert_eq!(false, record.target().buffer_exists("z"));
        assert_eq!(false, record.target().buffer_exists("e"));
        assert_eq!(false, record.target().buffer_exists("f"));
        assert_eq!(false, record.target().buffer_exists("g"));
        assert_eq!(false, record.target().buffer_exists("p"));
        // Redo
        record.redo()?;
        assert_eq!(false, record.target().buffer_exists("buffer"));
        assert_eq!(true, record.target().buffer_exists("z"));
        assert_eq!(b"ABCD".to_vec(), record.target().get_buffer("z")?.data);
        assert_eq!(0x80000000, record.target().get_buffer("z")?.base_address);
        assert_eq!(true, record.target().buffer_exists("e"));
        assert_eq!(b"E".to_vec(), record.target().get_buffer("e")?.data);
        assert_eq!(0x80000004, record.target().get_buffer("e")?.base_address);
        assert_eq!(true, record.target().buffer_exists("f"));
        assert_eq!(b"F".to_vec(), record.target().get_buffer("f")?.data);
        assert_eq!(0x80000005, record.target().get_buffer("f")?.base_address);
        assert_eq!(true, record.target().buffer_exists("g"));
        assert_eq!(b"GHIJKLMNO".to_vec(), record.target().get_buffer("g")?.data);
        assert_eq!(0x80000006, record.target().get_buffer("g")?.base_address);
        assert_eq!(true, record.target().buffer_exists("p"));
        assert_eq!(b"P".to_vec(), record.target().get_buffer("p")?.data);
        // This base_address was customized in the split
        assert_eq!(1234, record.target().get_buffer("p")?.base_address);
        Ok(())
    }
    // A split set that does not start at offset 0 must be rejected.
    #[test]
    fn test_fails_when_no_zero() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Create a buffer
        record.apply(Action::buffer_create_from_bytes("buffer", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        // Split it
        assert!(record.apply(Action::buffer_split("buffer", vec![
            Split { new_name: "e".to_string(), start: 4, new_base: None },
            Split { new_name: "f".to_string(), start: 8, new_base: None },
        ])).is_err());
        assert_eq!(false, record.target().buffer_exists("e"));
        assert_eq!(false, record.target().buffer_exists("f"));
        Ok(())
    }
    // An empty split list must be rejected.
    #[test]
    fn test_fails_when_no_splits() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Create a buffer
        record.apply(Action::buffer_create_from_bytes("buffer", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        // Split it
        assert!(record.apply(Action::buffer_split("buffer", vec![])).is_err());
        Ok(())
    }
    // A split offset beyond the end of the buffer must be rejected.
    #[test]
    fn test_fails_when_too_long() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Create a buffer
        record.apply(Action::buffer_create_from_bytes("buffer", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        // Split it
        assert!(record.apply(Action::buffer_split("buffer", vec![
            Split { new_name: "a".to_string(), start: 0, new_base: None },
            Split { new_name: "e".to_string(), start: 100, new_base: None },
        ])).is_err());
        assert_eq!(false, record.target().buffer_exists("a"));
        assert_eq!(false, record.target().buffer_exists("e"));
        Ok(())
    }
    // Two splits at the same offset create a zero-sized buffer - rejected.
    #[test]
    fn test_fails_when_zero_sized_area() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Create a buffer
        record.apply(Action::buffer_create_from_bytes("buffer", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        // Split it
        assert!(record.apply(Action::buffer_split("buffer", vec![
            Split { new_name: "a".to_string(), start: 0, new_base: None },
            Split { new_name: "e".to_string(), start: 4, new_base: None },
            Split { new_name: "f".to_string(), start: 0, new_base: None },
        ])).is_err());
        assert_eq!(false, record.target().buffer_exists("a"));
        assert_eq!(false, record.target().buffer_exists("e"));
        assert_eq!(false, record.target().buffer_exists("f"));
        Ok(())
    }
    // Splitting a buffer that was never created must fail cleanly.
    #[test]
    fn test_fails_when_buffer_doesnt_exist() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Split it
        assert!(record.apply(Action::buffer_split("nosuchbuffer", vec![
            Split { new_name: "a".to_string(), start: 0, new_base: None },
            Split { new_name: "e".to_string(), start: 4, new_base: None },
        ])).is_err());
        assert_eq!(false, record.target().buffer_exists("a"));
        assert_eq!(false, record.target().buffer_exists("e"));
        Ok(())
    }
    // Duplicate target names within one split set must be rejected.
    #[test]
    fn test_fails_when_duplicates() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Create a buffer
        record.apply(Action::buffer_create_from_bytes("buffer", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        // Split it
        assert!(record.apply(Action::buffer_split("buffer", vec![
            Split { new_name: "a".to_string(), start: 0, new_base: None },
            Split { new_name: "a".to_string(), start: 4, new_base: None },
        ])).is_err());
        assert_eq!(false, record.target().buffer_exists("a"));
        Ok(())
    }
    // A target name that collides with an existing buffer must be rejected.
    #[test]
    fn test_fails_when_buffer_exists() -> SimpleResult<()> {
        let mut record: Record<Action> = Record::new(
            H2Project::new("name", "1.0")
        );
        // Create a buffer
        record.apply(Action::buffer_create_from_bytes("buffer", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        record.apply(Action::buffer_create_from_bytes("exists", b"ABCDEFGHIJKLMNOP".to_vec(), 0x80000000))?;
        assert_eq!(true, record.target().buffer_exists("buffer"));
        // Split it
        assert!(record.apply(Action::buffer_split("buffer", vec![
            Split { new_name: "a".to_string(), start: 0, new_base: None },
            Split { new_name: "exists".to_string(), start: 4, new_base: None },
        ])).is_err());
        assert_eq!(false, record.target().buffer_exists("a"));
        Ok(())
    }
}
| 35.955814 | 108 | 0.584503 |
64306ac280836453225b90cfc0111f751dcc09dc | 796 | #![feature(test)]
extern crate ralloc;
extern crate test;
#[global_allocator]
// Route every heap allocation in this benchmark through ralloc instead of the
// system allocator, so the numbers measure ralloc itself.
static ALLOCATOR: ralloc::Allocator = ralloc::Allocator;
use std::sync::mpsc;
use std::thread;
#[bench]
// Benchmark allocator behavior under cross-thread message passing: boxed
// values are allocated on spawned threads and sent over mpsc channels.
fn bench_mpsc(b: &mut test::Bencher) {
    b.iter(|| {
        let (tx, rx) = mpsc::channel::<Box<u64>>();
        thread::spawn(move || {
            tx.send(Box::new(0xBABAFBABAF)).unwrap();
            tx.send(Box::new(0xDEADBEAF)).unwrap();
            tx.send(Box::new(0xDECEA5E)).unwrap();
            tx.send(Box::new(0xDEC1A551F1E5)).unwrap();
        });
        let (ty, ry) = mpsc::channel();
        for _ in 0..0xFF {
            let ty = ty.clone();
            thread::spawn(move || {
                ty.send(Box::new(0xFA11BAD)).unwrap();
            });
        }
        // NOTE(review): the receivers are returned from the closure,
        // presumably so the channels stay alive past the loop and their
        // teardown isn't optimized away - confirm against Bencher semantics.
        (rx, ry)
    });
}
| 23.411765 | 56 | 0.515075 |
abed8b6cad100a9afd3bcb9f5a43c4f391953193 | 8,488 | use regex::{Regex, RegexBuilder};
// Identifier string matching patterns.
// Each variant corresponds to one prefix/wildcard form of the identifier
// grammar parsed by `IdentifierParser::into_identifier`; the example beside
// each variant shows the source syntax it is parsed from. `F*` variants are
// the floating point versions, selected when the literal contains a `.`.
#[derive(Clone, Debug)]
pub enum Pattern {
    // `*`
    Any,
    // `*foo*`
    Contains(String),
    // `*foo`
    EndsWith(String),
    // `foo`
    Exact(String),
    // `foo*`
    StartsWith(String),
    // `?foo`
    Regex(Regex),
    // `=1`
    Equal(i64),
    // `>1`
    GreaterThan(i64),
    // `>=1`
    GreaterThanOrEqual(i64),
    // `<1`
    LessThan(i64),
    // `<=1`
    LessThanOrEqual(i64),
    // `=1.0`
    FEqual(f64),
    // `>1`
    FGreaterThan(f64),
    // `>=1`
    FGreaterThanOrEqual(f64),
    // `<1`
    FLessThan(f64),
    // `<=1`
    FLessThanOrEqual(f64),
}
// An identifier containing its pattern and case options.
// When `ignore_case` is true, the parser stores string patterns lowercased.
#[derive(Clone, Debug)]
pub struct Identifier {
    /// Whether the identifier is case insensitive.
    pub ignore_case: bool,
    /// The match pattern of the identifier.
    pub pattern: Pattern,
}
/// Parse data into an Identifier. This trait parses a Tau Engine identifier into an `Identifier`.
pub trait IdentifierParser {
    /// Consume the value and parse it; fails with an invalid-identifier
    /// error on a malformed regex or numeric literal.
    fn into_identifier(self) -> crate::Result<Identifier>;
}
impl IdentifierParser for String {
fn into_identifier(self) -> crate::Result<Identifier> {
let (insensitive, string) = if cfg!(feature = "ignore_case") {
(true, &self[..])
} else if let Some(s) = self.strip_prefix('i') {
(true, s)
} else {
(false, &self[..])
};
let pattern = if let Some(s) = string.strip_prefix('?') {
Pattern::Regex(
RegexBuilder::new(s)
.case_insensitive(insensitive)
.build()
.map_err(crate::error::parse_invalid_ident)?,
)
} else if let Some(s) = string.strip_prefix(">=") {
if s.contains(".") {
Pattern::FGreaterThanOrEqual(
s.parse::<f64>()
.map_err(crate::error::parse_invalid_ident)?,
)
} else {
Pattern::GreaterThanOrEqual(
s.parse::<i64>()
.map_err(crate::error::parse_invalid_ident)?,
)
}
} else if let Some(s) = string.strip_prefix('>') {
if s.contains(".") {
Pattern::FGreaterThan(
s.parse::<f64>()
.map_err(crate::error::parse_invalid_ident)?,
)
} else {
Pattern::GreaterThan(
s.parse::<i64>()
.map_err(crate::error::parse_invalid_ident)?,
)
}
} else if let Some(s) = string.strip_prefix("<=") {
if s.contains(".") {
Pattern::FLessThanOrEqual(
s.parse::<f64>()
.map_err(crate::error::parse_invalid_ident)?,
)
} else {
Pattern::LessThanOrEqual(
s.parse::<i64>()
.map_err(crate::error::parse_invalid_ident)?,
)
}
} else if let Some(s) = string.strip_prefix('<') {
if s.contains(".") {
Pattern::FLessThan(
s.parse::<f64>()
.map_err(crate::error::parse_invalid_ident)?,
)
} else {
Pattern::LessThan(
s.parse::<i64>()
.map_err(crate::error::parse_invalid_ident)?,
)
}
} else if let Some(s) = string.strip_prefix('=') {
if s.contains(".") {
Pattern::FEqual(
s.parse::<f64>()
.map_err(crate::error::parse_invalid_ident)?,
)
} else {
Pattern::Equal(
s.parse::<i64>()
.map_err(crate::error::parse_invalid_ident)?,
)
}
} else if string == "*" {
Pattern::Any
} else if string.starts_with('*') && string.ends_with('*') {
let s = if insensitive {
string[1..string.len() - 1].to_lowercase()
} else {
string[1..string.len() - 1].to_string()
};
Pattern::Contains(s)
} else if let Some(s) = string.strip_prefix('*') {
let s = if insensitive {
s.to_lowercase()
} else {
s.to_string()
};
Pattern::EndsWith(s)
} else if let Some(s) = string.strip_suffix('*') {
let s = if insensitive {
s.to_lowercase()
} else {
s.to_string()
};
Pattern::StartsWith(s)
} else if (string.starts_with('"') && string.ends_with('"'))
|| (string.starts_with('\'') && string.ends_with('\''))
{
let s = if insensitive {
string[1..string.len() - 1].to_lowercase()
} else {
string[1..string.len() - 1].to_string()
};
Pattern::Exact(s)
} else {
let s = if insensitive {
string.to_lowercase()
} else {
string.to_owned()
};
Pattern::Exact(s)
};
Ok(Identifier {
ignore_case: insensitive,
pattern,
})
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Each test parses one identifier form and asserts that the resulting
    // `Pattern` variant and payload match the grammar.
    #[test]
    fn contains() {
        let identifier = "*foo*".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::Contains(x) => {
                assert_eq!(x, "foo");
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn equal() {
        let identifier = "=1".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::Equal(x) => {
                assert_eq!(x, 1);
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn ends_with() {
        let identifier = "*foo".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::EndsWith(x) => {
                assert_eq!(x, "foo");
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn exact() {
        let identifier = "foo".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::Exact(x) => {
                assert_eq!(x, "foo");
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn greater_than() {
        let identifier = ">1".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::GreaterThan(x) => {
                assert_eq!(x, 1);
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn greater_than_or_equal() {
        let identifier = ">=1".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::GreaterThanOrEqual(x) => {
                assert_eq!(x, 1);
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn less_than() {
        let identifier = "<1".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::LessThan(x) => {
                assert_eq!(x, 1);
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn less_than_or_equal() {
        let identifier = "<=1".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::LessThanOrEqual(x) => {
                assert_eq!(x, 1);
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn regex() {
        let identifier = "?foo".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::Regex(x) => {
                assert_eq!(x.as_str(), "foo");
            }
            _ => panic!("unexpected pattern"),
        }
    }
    #[test]
    fn starts_with() {
        let identifier = "foo*".to_owned().into_identifier().unwrap();
        match identifier.pattern {
            Pattern::StartsWith(x) => {
                assert_eq!(x.as_str(), "foo");
            }
            _ => panic!("unexpected pattern"),
        }
    }
}
| 29.268966 | 98 | 0.450401 |
71e628e0e0f0bc24b0e98bc359c7fd21f5e9d4a3 | 537 | // ---------------------------------------------------
// Advent of Code 2021
// Day 23: Amphipod
//
// Copyright © 2021 Michael Fenske. See LICENSE.txt.
// ---------------------------------------------------
mod day23;
/// Run the day-23 solution, timing it with a monotonic clock.
///
/// Uses `Instant` instead of `SystemTime`: `SystemTime::duration_since`
/// returns an error (and the previous `.unwrap()` panicked) if the wall
/// clock is adjusted backwards mid-run, while `Instant::elapsed` is
/// monotonic and infallible.
fn main() {
    let start_time = std::time::Instant::now();

    println!("\n------------------");
    println!(" Day 23: Amphipod");
    println!("------------------");

    day23::run();

    println!("{:?}", start_time.elapsed());
}
| 24.409091 | 67 | 0.435754 |
6a3c1286d1c14a32f803851418f529dbe23645b0 | 3,676 | use crate::jcli_lib::certificate::{write_cert, Error};
use crate::jcli_lib::utils;
use bech32::FromBase32;
use chain_impl_mockchain::{
certificate::{Certificate, VoteCast, VotePlanId},
vote::{Choice, Payload},
};
use rand_chacha::rand_core::SeedableRng;
use std::path::PathBuf;
use structopt::StructOpt;
// CLI arguments for casting a public (plaintext choice) vote.
// NOTE: the `///` doc comments on the fields below are consumed by StructOpt
// as the CLI help text, so they are left exactly as-is.
#[derive(StructOpt)]
pub struct PublicVoteCast {
    /// the vote plan identified on the blockchain
    #[structopt(long = "vote-plan-id")]
    vote_plan_id: VotePlanId,
    /// the number of proposal in the vote plan you vote for
    #[structopt(long = "proposal-index")]
    proposal_index: u8,
    /// the number of choice within the proposal you vote for
    #[structopt(long = "choice")]
    choice: u8,
    /// write the output to the given file or print it to the standard output if not defined
    #[structopt(long = "output")]
    output: Option<PathBuf>,
}
// CLI arguments for casting a private (encrypted) vote.
// NOTE: the `///` doc comments below double as StructOpt help text; do not
// edit them casually.
#[derive(StructOpt)]
pub struct PrivateVoteCast {
    /// the vote plan identified on the blockchain
    #[structopt(long = "vote-plan-id")]
    vote_plan_id: VotePlanId,
    /// the number of proposal in the vote plan you vote for
    #[structopt(long = "proposal-index")]
    proposal_index: u8,
    /// size of voting options
    #[structopt(long = "options-size")]
    options: usize,
    /// the number of choice within the proposal you vote for
    #[structopt(long = "choice")]
    choice: u8,
    /// key to encrypt the vote with
    // NOTE(review): despite the help text, this is a *path* to a file holding
    // the bech32-encoded key; it is read via `utils::io::read_line` in `exec`.
    #[structopt(long = "key-path")]
    encrypting_key_path: Option<PathBuf>,
    /// write the output to the given file or print it to the standard output if not defined
    #[structopt(long = "output")]
    output: Option<PathBuf>,
}
/// create a vote cast certificate
#[derive(StructOpt)]
pub enum VoteCastCmd {
    // Subcommand variants; variant-level `///` docs would become CLI help
    // text (StructOpt), so plain comments are used here.
    Public(PublicVoteCast),
    Private(PrivateVoteCast),
}
impl PublicVoteCast {
    /// Build a public vote-cast certificate and write it to `output`
    /// (or stdout when no output path was given).
    pub fn exec(self) -> Result<(), Error> {
        let vote_cast = VoteCast::new(
            self.vote_plan_id,
            self.proposal_index,
            Payload::Public {
                choice: Choice::new(self.choice),
            },
        );
        let cert = Certificate::VoteCast(vote_cast);
        write_cert(self.output.as_deref(), cert.into())
    }
}
impl PrivateVoteCast {
    /// Build an encrypted vote-cast certificate and write it to `output`
    /// (or stdout when no output path was given).
    ///
    /// Steps: read the bech32-encoded encrypting key from the key file,
    /// verify its human-readable prefix, decode it, encrypt the chosen
    /// option together with a proof, and wrap the result in a certificate.
    pub fn exec(self) -> Result<(), Error> {
        let mut rng = rand_chacha::ChaChaRng::from_entropy();
        let key_line = utils::io::read_line(&self.encrypting_key_path)?;
        let (hrp, data) = bech32::decode(&key_line).map_err(Error::InvalidBech32)?;
        // Reject keys that are valid bech32 but carry the wrong prefix.
        if hrp != crate::jcli_lib::vote::bech32_constants::ENCRYPTING_VOTE_PK_HRP {
            return Err(Error::InvalidBech32Key {
                expected: crate::jcli_lib::vote::bech32_constants::ENCRYPTING_VOTE_PK_HRP
                    .to_string(),
                actual: hrp,
            });
        }
        let key_bin = Vec::<u8>::from_base32(&data)?;
        let key =
            chain_vote::EncryptingVoteKey::from_bytes(&key_bin).ok_or(Error::VoteEncryptingKey)?;
        // Encode the choice as a unit vector of `options` size and encrypt it.
        let vote = chain_vote::Vote::new(self.options, self.choice as usize);
        let (encrypted_vote, proof) =
            chain_impl_mockchain::vote::encrypt_vote(&mut rng, &key, vote);
        let payload = Payload::Private {
            encrypted_vote,
            proof,
        };
        let vote_cast = VoteCast::new(self.vote_plan_id, self.proposal_index, payload);
        let cert = Certificate::VoteCast(vote_cast);
        write_cert(self.output.as_deref(), cert.into())
    }
}
impl VoteCastCmd {
    /// Dispatch to the public or private vote-cast implementation.
    pub fn exec(self) -> Result<(), Error> {
        match self {
            VoteCastCmd::Public(vote_cast) => vote_cast.exec(),
            VoteCastCmd::Private(vote_cast) => vote_cast.exec(),
        }
    }
}
| 31.689655 | 97 | 0.638194 |
1dad4cfad3e8d5c68e4813bb973ce1276110e4e0 | 4,157 | // Copyright 2018 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use jni::JNIEnv;
use std::any::Any;
use std::thread;
use std::result;
use std::error::Error;
use JniError;
type ExceptionResult<T> = thread::Result<result::Result<T, JniError>>;
/// Unwraps the result of a `panic::catch_unwind` call.
///
/// On success the contained value is returned. On a JNI error or a caught
/// panic, a Java exception is raised (unless one is already pending) and
/// `error_val` is returned instead — the real error surfaces on the Java
/// side when the native method returns.
pub fn unwrap_exc_or<T>(env: &JNIEnv, res: ExceptionResult<T>, error_val: T) -> T {
    match res {
        // No panic, no JNI error: just hand back the value.
        Ok(Ok(val)) => val,
        // JNI-level error: throw manually unless the JVM already has a
        // pending exception that it will raise automatically.
        Ok(Err(jni_error)) => {
            if !env.exception_check().unwrap() {
                throw(env, &jni_error.to_string())
            }
            error_val
        }
        // The native code panicked: surface the panic message as an exception.
        Err(ref e) => {
            throw(env, &any_to_string(e));
            error_val
        }
    }
}
/// Like `unwrap_exc_or`, but substitutes `T::default()` as the error value.
pub fn unwrap_exc_or_default<T: Default>(env: &JNIEnv, res: ExceptionResult<T>) -> T {
    unwrap_exc_or(env, res, Default::default())
}
// Calls a corresponding `JNIEnv` method, so exception will be thrown when execution returns to
// the Java side.
//
// We cannot throw an exception from this function itself, so failures are
// written to the log instead.
fn throw(env: &JNIEnv, description: &str) {
    let exception = match env.find_class("java/lang/RuntimeException") {
        Ok(val) => val,
        Err(e) => {
            error!(
                "Unable to find 'RuntimeException' class: {}",
                e.description()
            );
            return;
        }
    };
    if let Err(e) = env.throw_new(exception, description) {
        // BUGFIX: this message previously duplicated the lookup error above;
        // this branch means the class was found but throwing still failed.
        error!(
            "Unable to throw 'RuntimeException': {}",
            e.description()
        );
    }
}
/// Extracts a human-readable message from the payload of a caught panic.
///
/// Handles the common payload types produced by `panic!`: string slices,
/// owned `String`s and boxed errors; any other payload type yields a
/// generic message.
pub fn any_to_string(any: &Box<dyn Any + Send>) -> String {
    if let Some(s) = any.downcast_ref::<&str>() {
        (*s).to_owned()
    } else if let Some(s) = any.downcast_ref::<String>() {
        s.to_owned()
    } else if let Some(error) = any.downcast_ref::<Box<dyn Error + Send>>() {
        error.description().to_owned()
    } else {
        "Unknown error occurred".to_owned()
    }
}
#[cfg(test)]
mod tests {
    use std::panic;
    use std::error::Error;
    use super::*;
    // Each test checks that `any_to_string` recovers the message from one
    // particular kind of panic payload.
    #[test]
    fn str_any() {
        let string = "Static string (&str)";
        let error = panic_error(string);
        assert_eq!(string, any_to_string(&error));
    }
    #[test]
    fn string_any() {
        let string = "Owned string (String)".to_owned();
        let error = panic_error(string.clone());
        assert_eq!(string, any_to_string(&error));
    }
    #[test]
    fn box_error_any() {
        let error: Box<Error + Send> = Box::new("e".parse::<i32>().unwrap_err());
        let description = error.description().to_owned();
        let error = panic_error(error);
        assert_eq!(description, any_to_string(&error));
    }
    #[test]
    fn unknown_any() {
        // A payload type `any_to_string` does not recognize (i32).
        let error = panic_error(1);
        assert_eq!("Unknown error occurred", any_to_string(&error));
    }
    // Panics with `val` as the payload and returns the caught payload box.
    fn panic_error<T: Send + 'static>(val: T) -> Box<Any + Send> {
        panic::catch_unwind(panic::AssertUnwindSafe(|| panic!(val))).unwrap_err()
    }
}
| 32.224806 | 97 | 0.593216 |
bb44c40c8231303128c2cafcbf9f74681444ee89 | 316 | use proc_macro2::{Literal, TokenStream};
use quote::quote;
/// Expands a documentation string into a `#[doc = "..."]` attribute token
/// stream; used by the Abigen when expanding functions defined in an ABI
/// spec.
pub fn expand_doc(s: &str) -> TokenStream {
    let lit = Literal::string(s);
    quote! {
        #[doc = #lit]
    }
}
| 24.307692 | 76 | 0.658228 |
769150ade96fe3f38a540ba4cbaaf147b4d66e91 | 15,046 | //! RTR PDUs.
//!
//! This module contains types that represent the protocol data units of
//! RPKI-RTR in their wire representation. That is, these types can be
//! used given to read and write operations as buffers.
//! See section 5 of RFC 6810 and RFC 8210. Annoyingly, the format of the
//! `EndOfData` PDU changes between the two versions.
use std::{io, mem, slice};
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use futures::future::Future;
use tokio::io::{
AsyncRead, AsyncWrite, ReadExact, WriteAll, read_exact, write_all
};
use crate::origins::AddressOrigin;
use super::serial::Serial;
//------------ Macro for Common Impls ----------------------------------------
/// Implements the shared plumbing for a PDU type: async `read`/`write`
/// helpers plus `AsRef<[u8]>`/`AsMut<[u8]>` that reinterpret the struct as
/// its raw wire bytes.
///
/// The byte reinterpretation is sound only because every PDU type is
/// `#[repr(packed)]` with plain integer fields kept in network byte order,
/// so the in-memory layout is exactly the wire layout.
macro_rules! common {
    ( $type:ident ) => {
        #[allow(dead_code)]
        impl $type {
            // Reads one PDU by filling a zeroed default value from `a`.
            pub fn read<A: AsyncRead>(
                a: A
            ) -> ReadExact<A, $type> {
                read_exact(a, Self::default())
            }
            // Writes the PDU's wire bytes to `a`.
            pub fn write<A: AsyncWrite>(
                self,
                a: A
            ) -> WriteAll<A, $type> {
                write_all(a, self)
            }
        }
        impl AsRef<[u8]> for $type {
            fn as_ref(&self) -> &[u8] {
                // SAFETY: the type is #[repr(packed)] and contains only
                // plain integers, so viewing it as bytes is well-defined.
                unsafe {
                    slice::from_raw_parts(
                        self as *const Self as *const u8,
                        mem::size_of::<Self>()
                    )
                }
            }
        }
        impl AsMut<[u8]> for $type {
            fn as_mut(&mut self) -> &mut [u8] {
                // SAFETY: same layout argument as `as_ref` above.
                unsafe {
                    slice::from_raw_parts_mut(
                        self as *mut Self as *mut u8,
                        mem::size_of::<Self>()
                    )
                }
            }
        }
    }
}
//------------ SerialNotify --------------------------------------------------
/// Serial Notify PDU (type 0): the cache announces that updated data up to
/// `serial` is available for the given session.
#[derive(Default)]
#[repr(packed)]
#[allow(dead_code)]
pub struct SerialNotify {
    header: Header,
    serial: u32,
}
impl SerialNotify {
    pub const PDU: u8 = 0;
    pub const LEN: u32 = 12;
    // `serial` is stored in network byte order.
    pub fn new(version: u8, session: u16, serial: Serial) -> Self {
        SerialNotify {
            header: Header::new(version, Self::PDU, session, Self::LEN),
            serial: serial.to_be(),
        }
    }
}
common!(SerialNotify);
//------------ SerialQuery ---------------------------------------------------
pub const SERIAL_QUERY_PDU: u8 = 1;
pub const SERIAL_QUERY_LEN: u32 = 12;
/// Serial Query PDU (type 1): the router asks the cache for all changes
/// since `serial` within the given session.
#[allow(dead_code)] // We currently don’t need this, but might later ...
#[derive(Default)]
#[repr(packed)]
pub struct SerialQuery {
    header: Header,
    payload: SerialQueryPayload,
}
#[allow(dead_code)]
impl SerialQuery {
    pub const PDU: u8 = 1;
    pub const LEN: u32 = 12;
    pub fn new(version: u8, session: u16, serial: Serial) -> Self {
        SerialQuery {
            header: Header::new(version, Self::PDU, session, 12),
            payload: SerialQueryPayload::new(serial),
        }
    }
    pub fn version(&self) -> u8 {
        self.header.version
    }
    // Session ID converted back to host byte order.
    pub fn session(&self) -> u16 {
        u16::from_be(self.header.session)
    }
    pub fn serial(&self) -> Serial {
        self.payload.serial()
    }
}
common!(SerialQuery);
//------------ SerialQueryPayload --------------------------------------------
/// The body of a Serial Query after the common header; kept separate so the
/// payload can be read on its own once the header has been parsed.
#[derive(Default)]
#[repr(packed)]
pub struct SerialQueryPayload {
    serial: u32
}
impl SerialQueryPayload {
    pub fn new(serial: Serial) -> Self {
        SerialQueryPayload {
            // Stored in network byte order.
            serial: serial.to_be()
        }
    }
    pub fn serial(&self) -> Serial {
        Serial::from_be(self.serial)
    }
}
common!(SerialQueryPayload);
//------------ ResetQuery ----------------------------------------------------
/// Reset Query PDU (type 2): the router asks the cache for the complete
/// current data set; it consists of the header only.
#[derive(Default)]
#[repr(packed)]
pub struct ResetQuery {
    #[allow(dead_code)]
    header: Header
}
impl ResetQuery {
    pub const PDU: u8 = 2;
    pub const LEN: u32 = 8;
    #[allow(dead_code)]
    pub fn new(version: u8) -> Self {
        ResetQuery {
            header: Header::new(version, 2, 0, 8)
        }
    }
}
common!(ResetQuery);
//------------ CacheResponse -------------------------------------------------
/// Cache Response PDU (type 3): the cache's header-only reply that precedes
/// a sequence of prefix PDUs.
#[derive(Default)]
#[repr(packed)]
pub struct CacheResponse {
    #[allow(dead_code)]
    header: Header
}
impl CacheResponse {
    pub fn new(version: u8, session: u16) -> Self {
        CacheResponse {
            header: Header::new(version, 3, session, 8)
        }
    }
}
common!(CacheResponse);
//------------ Ipv4Prefix ----------------------------------------------------
/// IPv4 Prefix PDU (type 4): one route origin attestation for an IPv4
/// prefix. See RFC 6810/8210 for the meaning of the `flags` field.
#[derive(Default)]
#[repr(packed)]
#[allow(dead_code)]
pub struct Ipv4Prefix {
    header: Header,
    flags: u8,
    prefix_len: u8,
    max_len: u8,
    zero: u8,
    prefix: u32,
    asn: u32
}
#[allow(dead_code)]
impl Ipv4Prefix {
    // Address and ASN are stored in network byte order.
    pub fn new(
        version: u8,
        flags: u8,
        prefix_len: u8,
        max_len: u8,
        prefix: Ipv4Addr,
        asn: u32
    ) -> Self {
        Ipv4Prefix {
            header: Header::new(version, 4, 0, 20),
            flags,
            prefix_len,
            max_len,
            zero: 0,
            prefix: u32::from(prefix).to_be(),
            asn: asn.to_be()
        }
    }
    pub fn version(&self) -> u8 {
        self.header.version
    }
    pub fn flags(&self) -> u8 {
        self.flags
    }
    pub fn prefix_len(&self) -> u8 {
        self.prefix_len
    }
    pub fn max_len(&self) -> u8 {
        self.max_len
    }
    pub fn prefix(&self) -> Ipv4Addr {
        u32::from_be(self.prefix).into()
    }
    pub fn asn(&self) -> u32 {
        u32::from_be(self.asn)
    }
}
common!(Ipv4Prefix);
//------------ Ipv6Prefix ----------------------------------------------------
/// IPv6 Prefix PDU (type 6): the IPv6 counterpart of `Ipv4Prefix`, with a
/// 128-bit address field.
#[derive(Default)]
#[repr(packed)]
#[allow(dead_code)]
pub struct Ipv6Prefix {
    header: Header,
    flags: u8,
    prefix_len: u8,
    max_len: u8,
    zero: u8,
    prefix: u128,
    asn: u32,
}
#[allow(dead_code)]
impl Ipv6Prefix {
    // Address and ASN are stored in network byte order.
    pub fn new(
        version: u8,
        flags: u8,
        prefix_len: u8,
        max_len: u8,
        prefix: Ipv6Addr,
        asn: u32
    ) -> Self {
        Ipv6Prefix {
            header: Header::new(version, 6, 0, 32),
            flags,
            prefix_len,
            max_len,
            zero: 0,
            prefix: u128::from(prefix).to_be(),
            asn: asn.to_be()
        }
    }
    pub fn version(&self) -> u8 {
        self.header.version
    }
    pub fn flags(&self) -> u8 {
        self.flags
    }
    pub fn prefix_len(&self) -> u8 {
        self.prefix_len
    }
    pub fn max_len(&self) -> u8 {
        self.max_len
    }
    pub fn prefix(&self) -> Ipv6Addr {
        u128::from_be(self.prefix).into()
    }
    pub fn asn(&self) -> u32 {
        u32::from_be(self.asn)
    }
}
common!(Ipv6Prefix);
//------------ Prefix --------------------------------------------------------
/// Either kind of prefix PDU, chosen by the address family of the origin.
pub enum Prefix {
    V4(Ipv4Prefix),
    V6(Ipv6Prefix),
}
impl Prefix {
    /// Builds the appropriate prefix PDU from an `AddressOrigin`.
    pub fn new(version: u8, flags: u8, origin: &AddressOrigin) -> Self {
        let prefix = origin.prefix();
        match prefix.address() {
            IpAddr::V4(addr) => {
                Prefix::V4(
                    Ipv4Prefix::new(
                        version,
                        flags,
                        prefix.address_length(),
                        origin.max_length(),
                        addr,
                        origin.as_id().into()
                    )
                )
            }
            IpAddr::V6(addr) => {
                Prefix::V6(
                    Ipv6Prefix::new(
                        version,
                        flags,
                        prefix.address_length(),
                        origin.max_length(),
                        addr,
                        origin.as_id().into()
                    )
                )
            }
        }
    }
    pub fn write<A: AsyncWrite>(
        self,
        a: A
    ) -> WriteAll<A, Self> {
        write_all(a, self)
    }
}
// Delegates to the byte view of whichever variant is held; `common!` is not
// used here because the enum itself is not `#[repr(packed)]`.
impl AsRef<[u8]> for Prefix {
    fn as_ref(&self) -> &[u8] {
        match *self {
            Prefix::V4(ref prefix) => prefix.as_ref(),
            Prefix::V6(ref prefix) => prefix.as_ref(),
        }
    }
}
impl AsMut<[u8]> for Prefix {
    fn as_mut(&mut self) -> &mut [u8] {
        match *self {
            Prefix::V4(ref mut prefix) => prefix.as_mut(),
            Prefix::V6(ref mut prefix) => prefix.as_mut(),
        }
    }
}
//------------ EndOfData -----------------------------------------------------
/// Generic End-of-Data PDU.
///
/// This PDU differs between version 0 and 1 of RTR. Consequently, this
/// generic version is an enum that can be both, depending on the version
/// requested.
pub enum EndOfData {
    V0(EndOfDataV0),
    V1(EndOfDataV1),
}
impl EndOfData {
    /// Picks the variant from `version`; the timing parameters are only
    /// carried by the version 1 form and are dropped for version 0.
    pub fn new(
        version: u8,
        session: u16,
        serial: Serial,
        refresh: u32,
        retry: u32,
        expire: u32
    ) -> Self {
        if version == 0 {
            EndOfData::V0(EndOfDataV0::new(session, serial))
        }
        else {
            EndOfData::V1(EndOfDataV1::new(
                version, session, serial, refresh, retry, expire
            ))
        }
    }
    pub fn write<A: AsyncWrite>(self, a: A) -> WriteAll<A, Self> {
        write_all(a, self)
    }
}
// Byte views delegate to the held variant, as with `Prefix`.
impl AsRef<[u8]> for EndOfData {
    fn as_ref(&self) -> &[u8] {
        match *self {
            EndOfData::V0(ref inner) => inner.as_ref(),
            EndOfData::V1(ref inner) => inner.as_ref(),
        }
    }
}
impl AsMut<[u8]> for EndOfData {
    fn as_mut(&mut self) -> &mut [u8] {
        match *self {
            EndOfData::V0(ref mut inner) => inner.as_mut(),
            EndOfData::V1(ref mut inner) => inner.as_mut(),
        }
    }
}
//------------ EndOfDataV0 ---------------------------------------------------
/// End-of-Data PDU (type 7) in its protocol version 0 form: header plus
/// serial number only.
#[derive(Default)]
#[repr(packed)]
#[allow(dead_code)]
pub struct EndOfDataV0 {
    header: Header,
    serial: u32
}
#[allow(dead_code)]
impl EndOfDataV0 {
    pub fn new(session: u16, serial: Serial) -> Self {
        EndOfDataV0 {
            header: Header::new(0, 7, session, 12),
            serial: serial.to_be()
        }
    }
    pub fn version(&self) -> u8 {
        self.header.version
    }
    pub fn session(&self) -> u16 {
        u16::from_be(self.header.session)
    }
    pub fn serial(&self) -> Serial {
        Serial::from_be(self.serial)
    }
}
common!(EndOfDataV0);
//------------ EndOfDataV1 ---------------------------------------------------
/// End-of-Data PDU (type 7) in its protocol version 1 form: additionally
/// carries the refresh, retry and expire timing parameters.
#[derive(Default)]
#[repr(packed)]
#[allow(dead_code)]
pub struct EndOfDataV1 {
    header: Header,
    serial: u32,
    refresh: u32,
    retry: u32,
    expire: u32,
}
#[allow(dead_code)]
impl EndOfDataV1 {
    // All multi-byte fields are stored in network byte order.
    pub fn new(
        version: u8,
        session: u16,
        serial: Serial,
        refresh: u32,
        retry: u32,
        expire: u32
    ) -> Self {
        EndOfDataV1 {
            header: Header::new(version, 7, session, 24),
            serial: serial.to_be(),
            refresh: refresh.to_be(),
            retry: retry.to_be(),
            expire: expire.to_be(),
        }
    }
    pub fn version(&self) -> u8 {
        self.header.version
    }
    pub fn session(&self) -> u16 {
        u16::from_be(self.header.session)
    }
    pub fn serial(&self) -> Serial {
        Serial::from_be(self.serial)
    }
    pub fn refresh(&self) -> u32 {
        u32::from_be(self.refresh)
    }
    pub fn retry(&self) -> u32 {
        u32::from_be(self.retry)
    }
    pub fn expire(&self) -> u32 {
        u32::from_be(self.expire)
    }
}
common!(EndOfDataV1);
//------------ CacheReset ----------------------------------------------------
/// Cache Reset PDU (type 8): header-only reply telling the router that the
/// cache cannot serve an incremental update and a reset query is needed.
#[derive(Default)]
#[repr(packed)]
#[allow(dead_code)]
pub struct CacheReset {
    header: Header
}
#[allow(dead_code)]
impl CacheReset {
    pub fn new(version: u8) -> Self {
        CacheReset {
            header: Header::new(version, 8, 0, 8)
        }
    }
    pub fn version(&self) -> u8 {
        self.header.version
    }
}
common!(CacheReset);
//------------ Error ---------------------------------------------------------
/// Error Report PDU (type 10), generic over the embedded erroneous PDU `P`
/// and the explanatory text `T`. The error code is carried in the header's
/// session field. `common!` cannot be used here because of the generics,
/// so the read/write/byte-view impls are spelled out below.
#[derive(Default)]
#[repr(packed)]
#[allow(dead_code)]
pub struct Error<P: Sized, T: Sized> {
    header: Header,
    pdu_len: u32,
    pdu: P,
    text_len: u32,
    text: T
}
impl<P, T> Error<P, T>
where
    P: Sized + 'static + Send,
    T: Sized + 'static + Send
{
    pub fn new(
        version: u8,
        error_code: u16,
        pdu: P,
        text: T
    ) -> Self {
        Error {
            // Total length: 16 bytes of fixed fields plus both payloads.
            header: Header::new(
                version, 10, error_code,
                16 + mem::size_of::<P>() as u32 + mem::size_of::<T>() as u32
            ),
            pdu_len: (mem::size_of::<P>() as u32).to_be(),
            pdu,
            text_len: (mem::size_of::<T>() as u32).to_be(),
            text
        }
    }
    /// Type-erases the error so differently-typed errors can share a path.
    pub fn boxed(self) -> BoxedError {
        BoxedError(Box::new(self))
    }
}
#[allow(dead_code)]
impl<P: Default + Sized, T: Default + Sized> Error<P, T> {
    pub fn read<A: AsyncRead>(
        a: A
    ) -> impl Future<Item=(A, Self), Error=io::Error> {
        read_exact(a, Self::default())
    }
}
#[allow(dead_code)]
impl<P: Sized, T: Sized> Error<P, T> {
    pub fn write<A: AsyncWrite>(
        self,
        a: A
    ) -> impl Future<Item=(A, Self), Error=io::Error> {
        write_all(a, self)
    }
}
impl<P: Sized, T: Sized> AsRef<[u8]> for Error<P, T> {
    fn as_ref(&self) -> &[u8] {
        // SAFETY: #[repr(packed)] — the struct's bytes are the wire bytes,
        // provided P and T are themselves plain wire data.
        unsafe {
            slice::from_raw_parts(
                self as *const Self as *const u8,
                mem::size_of::<Self>()
            )
        }
    }
}
impl<P: Sized, T: Sized> AsMut<[u8]> for Error<P, T> {
    fn as_mut(&mut self) -> &mut [u8] {
        // SAFETY: same layout argument as `as_ref` above.
        unsafe {
            slice::from_raw_parts_mut(
                self as *mut Self as *mut u8,
                mem::size_of::<Self>()
            )
        }
    }
}
//------------ BoxedError ----------------------------------------------------
/// A type-erased error PDU: anything byte-viewable, boxed so that errors
/// with different `P`/`T` parameters can be handled uniformly.
pub struct BoxedError(Box<dyn AsRef<[u8]> + Send>);
impl BoxedError {
    pub fn write<A: AsyncWrite>(self, a: A) -> WriteAll<A, Self> {
        write_all(a, self)
    }
}
impl AsRef<[u8]> for BoxedError {
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref().as_ref()
    }
}
//------------ Header --------------------------------------------------------
/// The fixed 8-byte header shared by all PDUs: protocol version, PDU type,
/// a 16-bit session field (reused as the error code for Error PDUs) and the
/// total PDU length. Multi-byte fields are kept in network byte order.
#[derive(Clone, Copy, Default)]
#[repr(packed)]
pub struct Header {
    version: u8,
    pdu: u8,
    session: u16,
    length: u32,
}
impl Header {
    pub fn new(version: u8, pdu: u8, session: u16, length: u32) -> Self {
        Header {
            version,
            pdu,
            session: session.to_be(),
            length: length.to_be(),
        }
    }
    pub fn version(self) -> u8 {
        self.version
    }
    pub fn pdu(self) -> u8 {
        self.pdu
    }
    pub fn session(self) -> u16 {
        u16::from_be(self.session)
    }
    pub fn length(self) -> u32 {
        u32::from_be(self.length)
    }
}
common!(Header);
| 21.341844 | 78 | 0.469959 |
fb381627e5176a3f787c54405e3b8e2d6fbf67e2 | 2,172 | //! `tyr` utilities
use error::{ErrorKind, Result};
use std::collections::BTreeMap;
use term;
/// Pad a string on the left with spaces to the given length.
///
/// Strings whose byte length is already `len` or more are returned
/// unchanged (no truncation).
fn pad_left(len: usize, s: &str) -> String {
    // `saturating_sub` yields 0 when no padding is needed, making the
    // repeat a no-op; this replaces the original manual push-loop.
    let mut output = " ".repeat(len.saturating_sub(s.len()));
    output.push_str(s);
    output
}
/// Pretty print table column information.
///
/// For every table, writes a bold green banner, then each row with its
/// column names right-aligned to the widest name and the column values
/// rendered via their type info (`(null)` for missing data). Uses the
/// `term` crate for coloring; any terminal failure aborts with an error.
pub fn pretty_print_tables(tables: &BTreeMap<String, ::run::Rows>) -> Result<()> {
    for (table, rows) in tables {
        let mut t = term::stdout().ok_or_else(|| ErrorKind::Stdout)?;
        t.attr(term::Attr::Bold)?;
        t.fg(term::color::GREEN)?;
        let table_name = format!(" Table '{}' ", table);
        writeln!(t, "{:#^80}", table_name)?;
        t.reset()?;
        t.flush()?;
        for (idx, col_data) in rows {
            t.fg(term::color::YELLOW)?;
            t.attr(term::Attr::Bold)?;
            // Row indices are 0-based internally but displayed 1-based.
            let mut row_name = String::from(" Row ");
            row_name.push_str(&(idx + 1).to_string());
            row_name.push(' ');
            writeln!(t, "{:-^80}", row_name)?;
            t.reset()?;
            t.flush()?;
            // Width of the widest column name, used for right-alignment.
            let max_col_length = col_data.iter().map(|col| col.column_name().len()).max().ok_or_else(|| ErrorKind::Max)?;
            for col in col_data {
                t.fg(term::color::GREEN)?;
                t.attr(term::Attr::Bold)?;
                let padded_col_name = pad_left(max_col_length, col.column_name());
                write!(t, "{}: ", padded_col_name)?;
                t.reset()?;
                t.flush()?;
                t.fg(term::color::GREEN)?;
                let type_info = col.type_info();
                let data = if let Some(ref data) = *col.data() {
                    data.to_string(type_info)?
                } else {
                    "(null)".to_string()
                };
                writeln!(t, "{}", data)?;
                t.reset()?;
                t.flush()?;
            }
            // Blank separator line between rows, but not after the last one.
            if (*idx as usize) < rows.len() - 1 {
                writeln!(t, "")?;
            }
        }
    }
    Ok(())
}
| 30.591549 | 121 | 0.462707 |
f5b5d4dda199c90ea1eeb4c014d5ab3ece96ac71 | 4,851 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use ast::{MetaItem, Item, Expr,};
use codemap::Span;
use ext::format;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use parse::token;
use ptr::P;
use std::collections::HashMap;
/// Expands `#[derive(Debug)]` (historically "Show") into an impl of
/// `std::fmt::Debug` with a single `fmt` method whose body is produced by
/// `show_substructure`.
pub fn expand_deriving_show<F>(cx: &mut ExtCtxt,
                               span: Span,
                               mitem: &MetaItem,
                               item: &Item,
                               push: F) where
    F: FnOnce(P<Item>),
{
    // &mut ::std::fmt::Formatter
    let fmtr = Ptr(box Literal(Path::new(vec!("std", "fmt", "Formatter"))),
                   Borrowed(None, ast::MutMutable));
    let trait_def = TraitDef {
        span: span,
        attributes: Vec::new(),
        path: Path::new(vec!("std", "fmt", "Debug")),
        additional_bounds: Vec::new(),
        generics: LifetimeBounds::empty(),
        methods: vec!(
            MethodDef {
                name: "fmt",
                generics: LifetimeBounds::empty(),
                explicit_self: borrowed_explicit_self(),
                args: vec!(fmtr),
                ret_ty: Literal(Path::new(vec!("std", "fmt", "Result"))),
                attributes: Vec::new(),
                combine_substructure: combine_substructure(box |a, b, c| {
                    show_substructure(a, b, c)
                })
            }
        )
    };
    trait_def.expand(cx, mitem, item, push)
}
/// We construct a format string and then defer to std::fmt, since that
/// knows what's up with formatting and so on.
///
/// Produces `<name>`, `<name>({:?}, ...)` or `<name> { field: {:?}, ... }`
/// depending on whether the value is a unit, tuple or record shape, and
/// emits a `formatter.write_fmt(...)` call with the collected field
/// expressions as arguments.
fn show_substructure(cx: &mut ExtCtxt, span: Span,
                     substr: &Substructure) -> P<Expr> {
    // build `<name>`, `<name>({}, {}, ...)` or `<name> { <field>: {},
    // <field>: {}, ... }` based on the "shape".
    //
    // Easy start: they all start with the name.
    let name = match *substr.fields {
        Struct(_) => substr.type_ident,
        EnumMatching(_, v, _) => v.node.name,
        // Static variants never reach here: deriving Debug only formats
        // actual values.
        EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) => {
            cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`")
        }
    };
    let mut format_string = String::from_str(token::get_ident(name).get());
    // the internal fields we're actually formatting
    let mut exprs = Vec::new();
    // Getting harder... making the format string:
    match *substr.fields {
        // unit struct/nullary variant: no work necessary!
        Struct(ref fields) if fields.len() == 0 => {}
        EnumMatching(_, _, ref fields) if fields.len() == 0 => {}
        Struct(ref fields) | EnumMatching(_, _, ref fields) => {
            if fields[0].name.is_none() {
                // tuple struct/"normal" variant
                format_string.push_str("(");
                for (i, field) in fields.iter().enumerate() {
                    if i != 0 { format_string.push_str(", "); }
                    format_string.push_str("{:?}");
                    exprs.push(field.self_.clone());
                }
                format_string.push_str(")");
            } else {
                // normal struct/struct variant; `{{`/`}}` are escaped braces
                // in the format string.
                format_string.push_str(" {{");
                for (i, field) in fields.iter().enumerate() {
                    if i != 0 { format_string.push_str(","); }
                    let name = token::get_ident(field.name.unwrap());
                    format_string.push_str(" ");
                    format_string.push_str(name.get());
                    format_string.push_str(": {:?}");
                    exprs.push(field.self_.clone());
                }
                format_string.push_str(" }}");
            }
        }
        _ => unreachable!()
    }
    // AST construction!
    // we're basically calling
    //
    // format_arg_method!(fmt, write_fmt, "<format_string>", exprs...)
    //
    // but doing it directly via ext::format.
    let formatter = substr.nonself_args[0].clone();
    let meth = cx.ident_of("write_fmt");
    let s = token::intern_and_get_ident(&format_string[]);
    let format_string = cx.expr_str(span, s);
    // phew, not our responsibility any more!
    let args = vec![
        format::expand_preparsed_format_args(cx, span, format_string,
                                             exprs, vec![], HashMap::new())
    ];
    cx.expr_method_call(span, formatter, meth, args)
}
| 34.404255 | 77 | 0.537415 |
1d8db03e7392bdf78ed40270e6b30297fa376237 | 10,939 | use crate::data::ApplicationError;
use crate::data::LinePattern;
use crate::data::LogQueryContext;
use crate::data::LogStream;
use crate::data::ParsedLine;
use crate::data::StreamEntry;
use crate::util;
use chrono::Datelike;
use chrono::TimeZone;
use core::pin::Pin;
use flate2::read::GzDecoder;
use futures::stream::Stream;
use futures::task::Context;
use futures::task::Poll;
use futures_util::stream::StreamExt;
use regex::Regex;
use std::cmp::Ordering;
use std::fs;
use std::fs::read_dir;
use std::fs::DirEntry;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Lines;
use std::path::Path;
use std::sync::Arc;
use std::time::SystemTime;
/// Line iterator over either a gzip-compressed or a plain log file, so the
/// rest of the code can treat both uniformly.
enum LinesIter {
    GZIP(Lines<BufReader<GzDecoder<std::fs::File>>>),
    PLAIN(Lines<BufReader<std::fs::File>>),
}
impl Iterator for LinesIter {
    type Item = std::io::Result<String>;
    // Delegates to whichever underlying reader is wrapped.
    fn next(&mut self) -> Option<Self::Item> {
        match self {
            LinesIter::GZIP(it) => it.next(),
            LinesIter::PLAIN(it) => it.next(),
        }
    }
}
/// Stream of parsed, filtered log lines from a single file.
struct FileLogStream {
    path: String,
    line_pattern: Arc<LinePattern>,
    context: Arc<LogQueryContext>,
    // Lazily created on the first poll; `None` until the file is opened.
    lines_iter: Option<LinesIter>,
    // Year taken from the file's mtime, used for year-less syslog timestamps.
    year: i32,
    // When true the stream stays open after EOF instead of terminating.
    watch: bool,
}
impl FileLogStream {
    /// Creates a stream for `path`; the file itself is opened lazily on the
    /// first poll.
    fn new(path: &str, line_pattern: &Arc<LinePattern>, context: &Arc<LogQueryContext>) -> Self {
        FileLogStream {
            path: path.to_owned(),
            line_pattern: line_pattern.clone(),
            context: context.clone(),
            lines_iter: None,
            year: 0,
            watch: false,
        }
    }
    /// Builder-style switch that keeps the stream alive at EOF.
    fn with_watch(mut self) -> Self {
        self.watch = true;
        self
    }
    /// Matches `line` against the grok pattern and extracts timestamp,
    /// loglevel and message. Syslog-style timestamps lack a year, so the
    /// file's mtime year is prepended. Unparseable timestamps become 0;
    /// lines that don't match the pattern at all yield a "Failed to parse"
    /// message with timestamp 0.
    fn apply_pattern(line: &str, line_pattern: &LinePattern, year: i32) -> ParsedLine {
        let maybe_matches = line_pattern.grok.match_against(&line);
        if let Some(matches) = maybe_matches {
            let timestamp = matches
                .get("timestamp")
                .map(|ts| {
                    let parse_result = if line_pattern.syslog_ts {
                        let ts_w_year = format!("{} {}", year, ts);
                        chrono::NaiveDateTime::parse_from_str(&ts_w_year, &line_pattern.chrono)
                    } else {
                        chrono::NaiveDateTime::parse_from_str(&ts, &line_pattern.chrono)
                    };
                    parse_result
                        .map(|ndt| {
                            // Resolve in the configured timezone and convert
                            // to milliseconds since the epoch.
                            line_pattern
                                .timezone
                                .from_local_datetime(&ndt)
                                .single()
                                .map(|dt| {
                                    dt.timestamp() as u128 * 1000
                                        + (dt.timestamp_subsec_millis() as u128)
                                })
                                .unwrap_or(0)
                        })
                        .unwrap_or(0)
                })
                .unwrap_or(0);
            ParsedLine {
                timestamp,
                loglevel: matches.get("loglevel").map(|s| s.to_string()),
                message: matches.get("message").unwrap_or("").to_string(),
            }
        } else {
            ParsedLine {
                timestamp: 0,
                loglevel: None,
                message: format!("Failed to parse: {}", line),
            }
        }
    }
    /// Pulls lines from the open file until one passes the context filter;
    /// returns `Ready(None)` at EOF unless `watch` is set.
    fn next_line(&mut self) -> Poll<Option<Result<StreamEntry, ApplicationError>>> {
        let lines_iter = self.lines_iter.as_mut();
        let lines_iter = lines_iter.unwrap(); // should panic, if this is called with None option
        while let Some(nextline) = lines_iter.next() {
            match nextline {
                Ok(line) => {
                    let parsed_line =
                        FileLogStream::apply_pattern(&line, &self.line_pattern, self.year);
                    if !self.context.matches(&parsed_line) {
                        continue;
                    } else {
                        return Poll::Ready(Some(Ok(StreamEntry { line, parsed_line })));
                    }
                }
                Err(e) => {
                    error!("Stream error: {:?}", e);
                    break;
                }
            }
        }
        if self.watch {
            // NOTE(review): `Pending` is returned without registering a
            // waker, which normally means the task is never polled again;
            // this seems to rely on the executor re-polling — confirm.
            Poll::Pending
        } else {
            Poll::Ready(None)
        }
    }
}
impl Stream for FileLogStream {
    type Item = Result<StreamEntry, ApplicationError>;
    /// On the first poll, derives the mtime year (for syslog timestamps)
    /// and opens the file — gzip-decoding when the path ends in `.gz` —
    /// then delegates to `next_line`. Subsequent polls go straight to
    /// `next_line`. A failure to open the file ends the stream.
    fn poll_next(self: Pin<&mut Self>, _ctx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut inner_self = self.get_mut();
        match &mut inner_self.lines_iter {
            Some(_) => inner_self.next_line(),
            None => {
                // Year of the file's last modification; 0 if unavailable.
                inner_self.year = std::fs::metadata(&inner_self.path)
                    .map(|meta| meta.modified())
                    .map(|maybe_time| {
                        maybe_time
                            .map(|systime| util::system_time_to_date_time(systime).year())
                            .unwrap_or(0)
                    })
                    .unwrap_or(0);
                match std::fs::File::open(&inner_self.path) {
                    Ok(file) => {
                        let lines_iter = if inner_self.path.ends_with(".gz") {
                            LinesIter::GZIP(BufReader::new(GzDecoder::new(file)).lines())
                        } else {
                            LinesIter::PLAIN(BufReader::new(file).lines())
                        };
                        inner_self.lines_iter = Some(lines_iter);
                        inner_self.next_line()
                    }
                    Err(e) => {
                        error!("Stream error: {:?}", e);
                        Poll::Ready(None)
                    }
                }
            }
        }
    }
}
/// Factory for log streams backed by (possibly rotated, possibly gzipped)
/// files on disk.
pub struct FileSource;
impl FileSource {
    /// Builds one flattened stream over every file matching `file_pattern`,
    /// in oldest-to-newest order. When `context.watch` is set, only the
    /// last (newest) file's stream keeps watching after EOF.
    pub fn create_stream(
        file_pattern: &Regex,
        line_pattern: &Arc<LinePattern>,
        context: &Arc<LogQueryContext>,
    ) -> Result<LogStream, ApplicationError> {
        let files = Self::resolve_files(&file_pattern, context.from_ms)?;
        let mut peekable_iter = files.iter().peekable();
        let mut streams = Vec::<FileLogStream>::new();
        while let Some(file) = peekable_iter.next() {
            let metadata = fs::metadata(&file).map_err(|_| ApplicationError::FailedToReadSource);
            let stream = match metadata {
                Ok(meta) if !meta.is_dir() => {
                    let fstream = FileLogStream::new(&file, &line_pattern, &context);
                    // for the last file, add a watch flag if requested
                    let fstream = if let Some(true) = context.watch {
                        if let None = peekable_iter.peek() {
                            fstream.with_watch()
                        } else {
                            fstream
                        }
                    } else {
                        fstream
                    };
                    Ok(fstream)
                }
                _ => Err(ApplicationError::FailedToReadSource),
            }?;
            streams.push(stream);
        }
        let fullstream = futures::stream::iter(streams).flatten();
        Ok(fullstream.boxed_local())
    }
    /// Expands `file_pattern` into the list of matching file paths.
    ///
    /// Files older than `from_ms` (epoch millis; 0 disables the filter) are
    /// skipped. Results are sorted by the captured `rotation` index
    /// descending (higher index = older file first), with modification time
    /// as the tie-breaker.
    fn resolve_files(file_pattern: &Regex, from_ms: u128) -> Result<Vec<String>, ApplicationError> {
        // The directory to scan is the parent component of the pattern.
        let folder = Path::new(file_pattern.as_str())
            .parent()
            .ok_or(ApplicationError::FailedToReadSource)?;
        debug!("Reading folder {:?}", folder);
        let now = SystemTime::now();
        let files_iter = read_dir(folder)
            .map_err(|e| {
                error!("{}", e);
                ApplicationError::FailedToReadSource
            })?
            .filter_map(Result::ok)
            .flat_map(|entry: DirEntry| {
                trace!("Found entry {:?}", entry);
                let t = entry
                    .path()
                    .to_str()
                    .map(|path| {
                        let maybe_matches = file_pattern.captures(path);
                        if let Some(captures) = maybe_matches {
                            if from_ms > 0 {
                                // Unreadable mtimes default to `now`, which
                                // always passes the filter.
                                let modtime = fs::metadata(&path)
                                    .map(|meta| meta.modified())
                                    .map(|maybe_time| maybe_time.unwrap_or_else(|_| now))
                                    .unwrap_or_else(|_| now);
                                let modtime_ms = modtime
                                    .duration_since(std::time::UNIX_EPOCH)
                                    .expect("Time went backwards")
                                    .as_millis();
                                if modtime_ms < from_ms {
                                    debug!("{} older than timestamp filter", path);
                                    return None;
                                }
                            }
                            debug!("matching file: {}", path);
                            // Optional numeric rotation suffix; 0 when absent.
                            let rotation_idx = captures
                                .name("rotation")
                                .map(|e| e.as_str().parse::<i32>())
                                .and_then(|r| r.ok());
                            Some((path.to_string(), rotation_idx.unwrap_or(0)))
                        } else {
                            None
                        }
                    })
                    .and_then(|t| t);
                t
            });
        let mut vec: Vec<(String, i32)> = files_iter.collect();
        vec.sort_by(|(path_a, idx_a), (path_b, idx_b)| match idx_b.cmp(idx_a) {
            Ordering::Equal => {
                let modtime_a = fs::metadata(&path_a)
                    .map(|meta| meta.modified())
                    .map(|maybe_time| maybe_time.unwrap_or_else(|_| now))
                    .unwrap_or_else(|_| now);
                let modtime_b = fs::metadata(&path_b)
                    .map(|meta| meta.modified())
                    .map(|maybe_time| maybe_time.unwrap_or_else(|_| now))
                    .unwrap_or_else(|_| now);
                modtime_a.cmp(&modtime_b)
            }
            ord => ord,
        });
        Ok(vec.into_iter().map(|(p, _)| p).collect())
    }
}
#[cfg(test)]
mod tests {
    use crate::log_source::file_source::FileSource;
    use regex::Regex;
    // Rotated files must come back oldest-first: .2.gz, then .1, then the
    // live file.
    #[test]
    fn test_resolve_files() {
        let regex = Regex::new(r#"tests/demo\.log(\.(?P<rotation>\d)(\.gz)?)?"#).unwrap();
        // BUGFIX: `&regex` had been mangled into `®ex` by an encoding
        // error (`&reg` read as the ® entity); restored the original call.
        let result = FileSource::resolve_files(&regex, 0).unwrap();
        assert_eq!(result.len(), 3);
        assert_eq!(result.get(0), Some(&"tests/demo.log.2.gz".to_string()));
        assert_eq!(result.get(1), Some(&"tests/demo.log.1".to_string()));
        assert_eq!(result.get(2), Some(&"tests/demo.log".to_string()));
    }
}
| 36.342193 | 100 | 0.460097 |
7ab8001f62a9b765211602ea2180c0b68dc27f13 | 117 | #[macro_use]
mod smack;
use smack::*;
// @expect error
fn main() {
    let a = 2;
    let b = 3;
    // Integer division gives 3 / 2 == 1, so this assertion FAILS by design;
    // the `@expect error` directive above tells the SMACK verifier that the
    // failure is the expected outcome of this test.
    assert!(b/a != 1);
}
| 10.636364 | 20 | 0.521368 |
911bfc7454c2a9992400604cce85ad8ef4be5ec4 | 3,820 | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-emscripten no threads support
#![feature(rand)]
#![feature(const_fn)]
use std::sync::atomic::{AtomicUsize, Ordering};
use std::__rand::{thread_rng, Rng};
use std::thread;
// Number of shuffled retries per length, and the maximum vector length
// exercised (drop_counts below must have MAX_LEN slots).
const REPEATS: usize = 5;
const MAX_LEN: usize = 32;
// One drop counter per element creation ID, indexed by `creation_id`.
static drop_counts: [AtomicUsize; MAX_LEN] =
    // FIXME #5244: AtomicUsize is not Copy.
    [
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
        AtomicUsize::new(0), AtomicUsize::new(0),
    ];
// Monotonic source of creation IDs, reset before each run.
static creation_count: AtomicUsize = AtomicUsize::new(0);
// An element that records its own destruction in `drop_counts`, letting the
// test verify that sorting drops each element exactly once even on panic.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord)]
struct DropCounter { x: u32, creation_id: usize }
impl Drop for DropCounter {
    fn drop(&mut self) {
        drop_counts[self.creation_id].fetch_add(1, Ordering::Relaxed);
    }
}
/// Stress-test: for every vector length and every possible panic point
/// inside the sort comparator, sorting in a panicking thread must still
/// drop each element exactly once (no leaks, no double drops).
pub fn main() {
    // len can't go above 64.
    for len in 2..MAX_LEN {
        for _ in 0..REPEATS {
            // reset the count for these new DropCounters, so their
            // IDs start from 0.
            creation_count.store(0, Ordering::Relaxed);
            let mut rng = thread_rng();
            let main = (0..len).map(|_| {
                DropCounter {
                    x: rng.next_u32(),
                    creation_id: creation_count.fetch_add(1, Ordering::Relaxed),
                }
            }).collect::<Vec<_>>();
            // work out the total number of comparisons required to sort
            // this array...
            let mut count = 0_usize;
            main.clone().sort_by(|a, b| { count += 1; a.cmp(b) });
            // ... and then panic on each and every single one.
            for panic_countdown in 0..count {
                // refresh the counters.
                for c in &drop_counts {
                    c.store(0, Ordering::Relaxed);
                }
                let v = main.clone();
                // The panic unwinds only the spawned thread; `join` collects
                // the failure without aborting the test.
                let _ = thread::spawn(move|| {
                    let mut v = v;
                    let mut panic_countdown = panic_countdown;
                    v.sort_by(|a, b| {
                        if panic_countdown == 0 {
                            panic!()
                        }
                        panic_countdown -= 1;
                        a.cmp(b)
                    })
                }).join();
                // check that the number of things dropped is exactly
                // what we expect (i.e. the contents of `v`).
                for (i, c) in drop_counts.iter().enumerate().take(len) {
                    let count = c.load(Ordering::Relaxed);
                    assert!(count == 1,
                            "found drop count == {} for i == {}, len == {}",
                            count, i, len);
                }
            }
        }
    }
}
| 37.087379 | 80 | 0.539529 |
8fe256669425b3ec96f7645879ba0e16bfc37c90 | 4,681 | use crate::result::Result;
use nix::unistd::Pid;
use simple_error::bail;
use simple_error::try_with;
use std::mem::size_of;
use std::mem::MaybeUninit;
#[cfg(all(target_os = "linux", target_env = "gnu"))]
const PTRACE_GET_SYSCALL_INFO: u32 = 0x420e;
#[cfg(not(all(target_os = "linux", target_env = "gnu")))]
const PTRACE_GET_SYSCALL_INFO: i32 = 0x420e;
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
#[allow(dead_code)]
#[allow(non_camel_case_types, clippy::upper_case_acronyms)]
enum OpType {
PTRACE_SYSCALL_INFO_NONE = 0,
PTRACE_SYSCALL_INFO_ENTRY = 1,
PTRACE_SYSCALL_INFO_EXIT = 2,
PTRACE_SYSCALL_INFO_SECCOMP = 3,
unknown = 4,
}
// Payload written by the kernel on a syscall-entry stop.
#[repr(C)]
#[derive(Copy, Clone, Debug)]
struct Entry {
    nr: u64,
    args: [u64; 6],
}
// Payload written by the kernel on a syscall-exit stop.
#[repr(C)]
#[derive(Copy, Clone, Debug)]
struct Exit {
    rval: i64,
    is_error: u8,
}
// Payload written by the kernel on a seccomp stop.
#[repr(C)]
#[derive(Copy, Clone, Debug)]
struct Seccomp {
    nr: u64,
    args: [u64; 6],
    ret_data: u32,
}
// Untagged C union of the three payload kinds; `RawInfo.op` tells which
// member is valid. Only read through `parse_raw_data()`.
#[repr(C)]
#[derive(Copy, Clone)]
union RawData {
    entry: Entry,
    exit: Exit,
    seccomp: Seccomp,
}
/// equivalent to `ptrace_syscall_info`
// Layout must match the kernel struct exactly — see the size assertions in
// the test module below.
#[repr(C)]
#[derive(Copy, Clone)]
pub struct RawInfo {
    op: OpType,
    arch: u32,
    instruction_pointer: u64,
    stack_pointer: u64,
    data: RawData,
}
/// See man ptrace (linux) for reference.
// Safe, parsed counterpart of `RawInfo`: the raw tag + union pair becomes a
// proper Rust enum (`SyscallOp`), so callers never touch the union.
#[derive(Copy, Clone, Debug)]
pub struct SyscallInfo {
    pub arch: u32,
    pub instruction_pointer: u64,
    pub stack_pointer: u64,
    pub op: SyscallOp,
}
// One variant per `OpType` discriminant, carrying the matching payload.
#[derive(Copy, Clone, Debug)]
pub enum SyscallOp {
    Entry {
        nr: u64,
        args: [u64; 6],
    },
    Exit {
        rval: i64,
        is_error: u8,
    },
    Seccomp {
        nr: u64,
        args: [u64; 6],
        ret_data: u32,
    },
    None,
}
/// Translate the tagged union inside a raw `ptrace_syscall_info` into the
/// safe `SyscallOp` enum, reading only the union member selected by
/// `info.op`. Fails on an unrecognised discriminant.
fn parse_raw_data(info: RawInfo) -> Result<SyscallOp> {
    // SAFETY: for each discriminant we read only the union field the kernel
    // defines as valid for that `op` value; for PTRACE_SYSCALL_INFO_NONE no
    // union field is touched at all. All members are Copy, plain-old-data.
    let op = unsafe {
        match info.op {
            OpType::PTRACE_SYSCALL_INFO_NONE => SyscallOp::None,
            OpType::PTRACE_SYSCALL_INFO_ENTRY => SyscallOp::Entry {
                nr: info.data.entry.nr,
                args: info.data.entry.args,
            },
            OpType::PTRACE_SYSCALL_INFO_EXIT => SyscallOp::Exit {
                rval: info.data.exit.rval,
                is_error: info.data.exit.is_error,
            },
            OpType::PTRACE_SYSCALL_INFO_SECCOMP => SyscallOp::Seccomp {
                nr: info.data.seccomp.nr,
                args: info.data.seccomp.args,
                ret_data: info.data.seccomp.ret_data,
            },
            OpType::unknown => bail!("unknown ptrace_syscall_info.op: {:?}", info.op),
        }
    };
    Ok(op)
}
/// Lift a raw, C-layout `RawInfo` into the safe `SyscallInfo` representation,
/// decoding the union payload via [`parse_raw_data`].
fn parse_raw_info(raw: RawInfo) -> Result<SyscallInfo> {
    // Decode the payload first so a bad discriminant fails early.
    let op = parse_raw_data(raw)?;
    Ok(SyscallInfo {
        arch: raw.arch,
        instruction_pointer: raw.instruction_pointer,
        stack_pointer: raw.stack_pointer,
        op,
    })
}
/// Query the kernel for the current syscall stop of tracee `pid` via
/// `ptrace(PTRACE_GET_SYSCALL_INFO)` and parse it into a [`SyscallInfo`].
///
/// # Errors
/// Fails when the raw ptrace call errors, when the kernel reports an
/// unexpected number of written bytes, or when the `op` value is unknown.
pub fn get_syscall_info(pid: Pid) -> Result<SyscallInfo> {
    // Zeroed, not uninitialised: OpType 0 == PTRACE_SYSCALL_INFO_NONE, so
    // even an unwritten tail parses safely.
    let mut info = MaybeUninit::<RawInfo>::zeroed();
    // Safe, because the kernel writes at most size_of::<RawInfo>() bytes and at least `ret` bytes.
    // We check it has written size_of::<RawInfo>() bytes. We also allow it to omit the trailing
    // `data: RawData` field if it marks its absence in the op field, because in that case the
    // parser (`parse_raw_info()`) will ignore the data and never access it.
    let ret = unsafe {
        libc::ptrace(
            PTRACE_GET_SYSCALL_INFO,
            pid,
            size_of::<RawInfo>(),
            info.as_mut_ptr(),
        )
    };
    // ptrace returns the number of bytes it made available; <= 0 is failure.
    if ret <= 0 {
        bail!("ptrace get syscall info error: {}", ret);
    }
    let info = unsafe { info.assume_init() };
    // Accept either a full struct or a struct missing only the union tail,
    // but only when `op` says the tail carries no data.
    if !((info.op == OpType::PTRACE_SYSCALL_INFO_NONE
        && size_of::<RawInfo>() - size_of::<RawData>() == ret as usize)
        || (size_of::<RawInfo>() == ret as usize))
    {
        bail!("ptrace wrote unexpected number of bytes");
    }
    let info = try_with!(
        parse_raw_info(info),
        "cannot understand ptrace(PTRACE_GET_SYSCALL_INFO) response"
    );
    Ok(info)
}
#[cfg(test)]
mod test {
    #[test]
    fn assert_struct_sizes() {
        use super::*;
        // The kernel's ptrace_syscall_info struct changed size across
        // releases; this pins the layout this binding currently targets.
        //assert_eq!(size_of::<RawInfo>(), 0); // for linux <= v5.2
        assert_eq!(size_of::<RawInfo>(), 88); // for linux <= v5.10
        //assert_eq!(size_of::<RawInfo>(), 84); // for linux >= v5.11
    }
    #[test]
    fn check_linux_version() {
        // TODO add a build.rs script which uses
        // https://docs.rs/linux-version/0.1.1/linux_version/
        // to detect linux version and enables the corresponding feature via
        // https://doc.rust-lang.org/cargo/reference/build-scripts.html#rustc-cfg
    }
}
| 26.150838 | 99 | 0.594745 |
fbd315ebcc18489724fa633b4acfe4cda16431b0 | 14,980 | // Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::thread;
use std::time;
use kvproto::kvrpcpb::Context;
use raft::eraftpb::MessageType;
use engine_traits::{CfName, IterOptions, CF_DEFAULT};
use test_raftstore::*;
use tikv::storage::kv::*;
use tikv::storage::CfStatistics;
use tikv_util::codec::bytes;
use tikv_util::HandyRwLock;
use txn_types::Key;
#[test]
// End-to-end smoke test of the raft-backed storage engine: brings up a
// single-node cluster and runs every storage scenario helper (defined below)
// against the leader's engine.
fn test_raftkv() {
    let count = 1;
    let mut cluster = new_server_cluster(0, count);
    cluster.run();
    // make sure leader has been elected.
    assert_eq!(cluster.must_get(b"k1"), None);
    let region = cluster.get_region(b"");
    let leader_id = cluster.leader_of_region(region.get_id()).unwrap();
    let storage = cluster.sim.rl().storages[&leader_id.get_id()].clone();
    // Build a request context addressed at the leader peer of the region.
    let mut ctx = Context::default();
    ctx.set_region_id(region.get_id());
    ctx.set_region_epoch(region.get_region_epoch().clone());
    ctx.set_peer(region.get_peers()[0].clone());
    // Each helper leaves the engine clean for the next one.
    get_put(&ctx, &storage);
    batch(&ctx, &storage);
    seek(&ctx, &storage);
    near_seek(&ctx, &storage);
    cf(&ctx, &storage);
    empty_write(&ctx, &storage);
    wrong_context(&ctx, &storage);
    // TODO: test multiple node
}
#[test]
// Verifies that a leader whose lease is still valid can serve a local read
// even after being network-isolated from the rest of the cluster.
fn test_read_leader_in_lease() {
    let count = 3;
    let mut cluster = new_server_cluster(0, count);
    cluster.run();
    let k1 = b"k1";
    let (k2, v2) = (b"k2", b"v2");
    // make sure leader has been elected.
    assert_eq!(cluster.must_get(k1), None);
    let region = cluster.get_region(b"");
    let leader = cluster.leader_of_region(region.get_id()).unwrap();
    let storage = cluster.sim.rl().storages[&leader.get_id()].clone();
    let mut ctx = Context::default();
    ctx.set_region_id(region.get_id());
    ctx.set_region_epoch(region.get_region_epoch().clone());
    ctx.set_peer(leader.clone());
    // write some data
    assert_none(&ctx, &storage, k2);
    must_put(&ctx, &storage, k2, v2);
    // isolate leader
    cluster.add_send_filter(IsolationFilterFactory::new(leader.get_store_id()));
    // leader still in lease, check if can read on leader.
    // Fixed: was `assert_eq!(can_read(...), true)` — comparing a bool against
    // `true` is the clippy::bool_assert_comparison anti-pattern.
    assert!(can_read(&ctx, &storage, k2, v2));
}
#[test]
// A ReadIndex request addressed to a follower must succeed and report a
// non-zero committed index.
fn test_read_index_on_replica() {
    // Three nodes so at least one follower exists.
    let count = 3;
    let mut cluster = new_server_cluster(0, count);
    cluster.run();
    let k1 = b"k1";
    let (k2, v2) = (b"k2", b"v2");
    // Reading a missing key forces leader election to complete first.
    assert_eq!(cluster.must_get(k1), None);
    let region = cluster.get_region(b"");
    let leader = cluster.leader_of_region(region.get_id()).unwrap();
    let storage = cluster.sim.rl().storages[&leader.get_id()].clone();
    let mut ctx = Context::default();
    ctx.set_region_id(region.get_id());
    ctx.set_region_epoch(region.get_region_epoch().clone());
    ctx.set_peer(leader.clone());
    // Seed some data through the leader.
    assert_none(&ctx, &storage, k2);
    must_put(&ctx, &storage, k2, v2);
    // Pick the first peer that is not the leader and address it instead.
    let follower_peer = region
        .get_peers()
        .iter()
        .find(|p| p.get_id() != leader.get_id())
        .cloned();
    assert!(follower_peer.is_some());
    ctx.set_peer(follower_peer.as_ref().unwrap().clone());
    let resp = read_index_on_peer(
        &mut cluster,
        follower_peer.unwrap(),
        region.clone(),
        false,
        std::time::Duration::from_secs(5),
    );
    // The follower must answer without error and with a real read index.
    assert!(!resp.as_ref().unwrap().get_header().has_error());
    assert_ne!(
        resp.unwrap().get_responses()[0]
            .get_read_index()
            .get_read_index(),
        0
    );
}
#[test]
// Replica-read path: a follower with `replica_read` set must serve reads,
// stay consistent with new leader writes, and catch up after a restart.
fn test_read_on_replica() {
    let count = 3;
    let mut cluster = new_server_cluster(0, count);
    cluster.run();
    let k1 = b"k1";
    let (k2, v2) = (b"k2", b"v2");
    let (k3, v3) = (b"k3", b"v3");
    let (k4, v4) = (b"k4", b"v4");
    // make sure leader has been elected.
    assert_eq!(cluster.must_get(k1), None);
    let region = cluster.get_region(b"");
    let leader = cluster.leader_of_region(region.get_id()).unwrap();
    let leader_storage = cluster.sim.rl().storages[&leader.get_id()].clone();
    let mut leader_ctx = Context::default();
    leader_ctx.set_region_id(region.get_id());
    leader_ctx.set_region_epoch(region.get_region_epoch().clone());
    leader_ctx.set_peer(leader.clone());
    // write some data
    let peers = region.get_peers();
    assert_none(&leader_ctx, &leader_storage, k2);
    must_put(&leader_ctx, &leader_storage, k2, v2);
    // read on follower
    let mut follower_peer = None;
    let mut follower_id = 0;
    for p in peers {
        if p.get_id() != leader.get_id() {
            follower_id = p.get_id();
            follower_peer = Some(p.clone());
            break;
        }
    }
    assert!(follower_peer.is_some());
    // Context addressed at the follower, with replica read enabled so the
    // request is not rejected for not being the leader.
    let mut follower_ctx = Context::default();
    follower_ctx.set_region_id(region.get_id());
    follower_ctx.set_region_epoch(region.get_region_epoch().clone());
    follower_ctx.set_peer(follower_peer.as_ref().unwrap().clone());
    follower_ctx.set_replica_read(true);
    let follower_storage = cluster.sim.rl().storages[&follower_id].clone();
    assert_has(&follower_ctx, &follower_storage, k2, v2);
    // A write through the leader becomes visible on the replica.
    must_put(&leader_ctx, &leader_storage, k3, v3);
    assert_has(&follower_ctx, &follower_storage, k3, v3);
    // Write while the follower is down, then restart it; it must catch up.
    cluster.stop_node(follower_id);
    must_put(&leader_ctx, &leader_storage, k4, v4);
    cluster.run_node(follower_id).unwrap();
    let follower_storage = cluster.sim.rl().storages[&follower_id].clone();
    // sleep to ensure the follower has received a heartbeat from the leader
    thread::sleep(time::Duration::from_millis(300));
    assert_has(&follower_ctx, &follower_storage, k4, v4);
}
#[test]
// A follower that has lost contact with the leader (and cannot elect a new
// one) must reject ReadIndex requests with a "no leader" error rather than
// hang or serve stale data.
fn test_invalid_read_index_when_no_leader() {
    // Initialize cluster
    let mut cluster = new_node_cluster(0, 3);
    configure_for_lease_read(&mut cluster, Some(50), Some(3));
    cluster.cfg.raft_store.raft_heartbeat_ticks = 1;
    // Hibernation would suppress the ticks this test relies on.
    cluster.cfg.raft_store.hibernate_regions = false;
    let pd_client = Arc::clone(&cluster.pd_client);
    pd_client.disable_default_operator();
    // Set region and peers
    cluster.run();
    cluster.must_put(b"k0", b"v0");
    // Transfer leader to p2
    let region = cluster.get_region(b"k0");
    let leader = cluster.leader_of_region(region.get_id()).unwrap();
    let mut follower_peers = region.get_peers().to_vec();
    follower_peers.retain(|p| p.get_id() != leader.get_id());
    let follower = follower_peers.pop().unwrap();
    // Delay all raft messages on follower.
    // Dropping heartbeats makes the follower consider the leader gone;
    // dropping vote responses prevents it from winning an election itself.
    let heartbeat_filter = Box::new(
        RegionPacketFilter::new(region.get_id(), follower.get_store_id())
            .direction(Direction::Recv)
            .msg_type(MessageType::MsgHeartbeat)
            .when(Arc::new(AtomicBool::new(true))),
    );
    cluster
        .sim
        .wl()
        .add_recv_filter(follower.get_store_id(), heartbeat_filter);
    let vote_resp_filter = Box::new(
        RegionPacketFilter::new(region.get_id(), follower.get_store_id())
            .direction(Direction::Recv)
            .msg_type(MessageType::MsgRequestVoteResponse)
            .when(Arc::new(AtomicBool::new(true))),
    );
    cluster
        .sim
        .wl()
        .add_recv_filter(follower.get_store_id(), vote_resp_filter);
    // wait for election timeout
    thread::sleep(time::Duration::from_millis(300));
    // send read index requests to follower
    let mut request = new_request(
        region.get_id(),
        region.get_region_epoch().clone(),
        vec![new_read_index_cmd()],
        true,
    );
    request.mut_header().set_peer(follower.clone());
    let (cb, rx) = make_cb(&request);
    cluster
        .sim
        .rl()
        .async_command_on_node(follower.get_store_id(), request, cb)
        .unwrap();
    let resp = rx.recv_timeout(time::Duration::from_millis(500)).unwrap();
    // The follower must answer promptly with the explicit no-leader error.
    assert!(
        resp.get_header()
            .get_error()
            .get_message()
            .contains("can not read index due to no leader"),
        "{:?}",
        resp.get_header()
    );
}
/// Write `key -> value` through the engine, panicking on any failure.
fn must_put<E: Engine>(ctx: &Context, engine: &E, key: &[u8], value: &[u8]) {
    let encoded = Key::from_raw(key);
    engine.put(ctx, encoded, value.to_vec()).unwrap();
}
/// Write `key -> value` into column family `cf`, panicking on any failure.
fn must_put_cf<E: Engine>(ctx: &Context, engine: &E, cf: CfName, key: &[u8], value: &[u8]) {
    let encoded = Key::from_raw(key);
    engine.put_cf(ctx, cf, encoded, value.to_vec()).unwrap();
}
/// Delete `key` from the default column family, panicking on any failure.
fn must_delete<E: Engine>(ctx: &Context, engine: &E, key: &[u8]) {
    let encoded = Key::from_raw(key);
    engine.delete(ctx, encoded).unwrap();
}
/// Delete `key` from column family `cf`, panicking on any failure.
fn must_delete_cf<E: Engine>(ctx: &Context, engine: &E, cf: CfName, key: &[u8]) {
    let encoded = Key::from_raw(key);
    engine.delete_cf(ctx, cf, encoded).unwrap();
}
/// Assert that a fresh snapshot maps `key` to exactly `value`.
fn assert_has<E: Engine>(ctx: &Context, engine: &E, key: &[u8], value: &[u8]) {
    let snap = engine.snapshot(ctx).unwrap();
    let got = snap.get(&Key::from_raw(key)).unwrap();
    assert_eq!(got.unwrap(), value);
}
/// Returns `true` when a snapshot can be taken, additionally asserting that
/// the snapshot maps `key` to `value`; returns `false` when the snapshot
/// itself fails (e.g. lease expired / no leader).
fn can_read<E: Engine>(ctx: &Context, engine: &E, key: &[u8], value: &[u8]) -> bool {
    match engine.snapshot(ctx) {
        Ok(snap) => {
            assert_eq!(snap.get(&Key::from_raw(key)).unwrap().unwrap(), value);
            true
        }
        Err(_) => false,
    }
}
/// Assert that a fresh snapshot maps `key` to `value` within column family `cf`.
fn assert_has_cf<E: Engine>(ctx: &Context, engine: &E, cf: CfName, key: &[u8], value: &[u8]) {
    let snap = engine.snapshot(ctx).unwrap();
    let got = snap.get_cf(cf, &Key::from_raw(key)).unwrap();
    assert_eq!(got.unwrap(), value);
}
/// Assert that `key` is absent in a fresh snapshot.
fn assert_none<E: Engine>(ctx: &Context, engine: &E, key: &[u8]) {
    let snap = engine.snapshot(ctx).unwrap();
    let got = snap.get(&Key::from_raw(key)).unwrap();
    assert_eq!(got, None);
}
/// Assert that `key` is absent in column family `cf` of a fresh snapshot.
fn assert_none_cf<E: Engine>(ctx: &Context, engine: &E, cf: CfName, key: &[u8]) {
    let snap = engine.snapshot(ctx).unwrap();
    let got = snap.get_cf(cf, &Key::from_raw(key)).unwrap();
    assert_eq!(got, None);
}
/// Seek a fresh iterator to `key` and assert it lands on the expected
/// `(key, value)` pair. Note the stored key is compared in its
/// bytes-encoded form.
fn assert_seek<E: Engine>(ctx: &Context, engine: &E, key: &[u8], pair: (&[u8], &[u8])) {
    let (want_key, want_val) = pair;
    let snap = engine.snapshot(ctx).unwrap();
    let mut cursor = snap
        .iter(IterOptions::default(), ScanMode::Mixed)
        .unwrap();
    let mut stats = CfStatistics::default();
    cursor.seek(&Key::from_raw(key), &mut stats).unwrap();
    assert_eq!(cursor.key(&mut stats), &*bytes::encode_bytes(want_key));
    assert_eq!(cursor.value(&mut stats), want_val);
}
/// Column-family variant of `assert_seek`: seek within `cf` and assert the
/// cursor lands on the expected `(key, value)` pair.
fn assert_seek_cf<E: Engine>(
    ctx: &Context,
    engine: &E,
    cf: CfName,
    key: &[u8],
    pair: (&[u8], &[u8]),
) {
    let (want_key, want_val) = pair;
    let snap = engine.snapshot(ctx).unwrap();
    let mut cursor = snap
        .iter_cf(cf, IterOptions::default(), ScanMode::Mixed)
        .unwrap();
    let mut stats = CfStatistics::default();
    cursor.seek(&Key::from_raw(key), &mut stats).unwrap();
    assert_eq!(cursor.key(&mut stats), &*bytes::encode_bytes(want_key));
    assert_eq!(cursor.value(&mut stats), want_val);
}
fn assert_near_seek<I: Iterator>(cursor: &mut Cursor<I>, key: &[u8], pair: (&[u8], &[u8])) {
let mut statistics = CfStatistics::default();
assert!(
cursor
.near_seek(&Key::from_raw(key), &mut statistics)
.unwrap(),
hex::encode_upper(key)
);
assert_eq!(cursor.key(&mut statistics), &*bytes::encode_bytes(pair.0));
assert_eq!(cursor.value(&mut statistics), pair.1);
}
fn assert_near_reverse_seek<I: Iterator>(cursor: &mut Cursor<I>, key: &[u8], pair: (&[u8], &[u8])) {
let mut statistics = CfStatistics::default();
assert!(
cursor
.near_reverse_seek(&Key::from_raw(key), &mut statistics)
.unwrap(),
hex::encode_upper(key)
);
assert_eq!(cursor.key(&mut statistics), &*bytes::encode_bytes(pair.0));
assert_eq!(cursor.value(&mut statistics), pair.1);
}
/// Basic point get/put: a missing key reads as None, and each successive put
/// overwrites the previous value.
fn get_put<E: Engine>(ctx: &Context, engine: &E) {
    assert_none(ctx, engine, b"x");
    for &val in &[b"1" as &[u8], b"2"] {
        must_put(ctx, engine, b"x", val);
        assert_has(ctx, engine, b"x", val);
    }
}
/// Batched writes: a batch of puts followed by a batch of deletes, verifying
/// visibility after each batch.
fn batch<E: Engine>(ctx: &Context, engine: &E) {
    engine
        .write(
            ctx,
            vec![
                Modify::Put(CF_DEFAULT, Key::from_raw(b"x"), b"1".to_vec()),
                Modify::Put(CF_DEFAULT, Key::from_raw(b"y"), b"2".to_vec()),
            ],
        )
        .unwrap();
    assert_has(ctx, engine, b"x", b"1");
    assert_has(ctx, engine, b"y", b"2");
    engine
        .write(
            ctx,
            vec![
                Modify::Delete(CF_DEFAULT, Key::from_raw(b"x")),
                Modify::Delete(CF_DEFAULT, Key::from_raw(b"y")),
            ],
        )
        .unwrap();
    // Fixed: the original asserted b"y" twice and never verified that b"x"
    // was actually deleted.
    assert_none(ctx, engine, b"x");
    assert_none(ctx, engine, b"y");
}
/// Forward-seek semantics: a seek lands on the first key at or after the
/// probe, and seeking past the last key finds nothing.
fn seek<E: Engine>(ctx: &Context, engine: &E) {
    // With only "x" present, any probe at or before "x" lands on it.
    must_put(ctx, engine, b"x", b"1");
    for &probe in &[b"x" as &[u8], b"a"] {
        assert_seek(ctx, engine, probe, (b"x", b"1"));
    }
    // After adding "z", probes strictly past "x" land on "z".
    must_put(ctx, engine, b"z", b"2");
    for &probe in &[b"y" as &[u8], b"x\x00"] {
        assert_seek(ctx, engine, probe, (b"z", b"2"));
    }
    // Seeking beyond the last key reports "not found".
    let snap = engine.snapshot(ctx).unwrap();
    let mut cursor = snap
        .iter(IterOptions::default(), ScanMode::Mixed)
        .unwrap();
    let mut stats = CfStatistics::default();
    let found = cursor.seek(&Key::from_raw(b"z\x00"), &mut stats).unwrap();
    assert!(!found);
    // Clean up for the next scenario.
    must_delete(ctx, engine, b"x");
    must_delete(ctx, engine, b"z");
}
/// Near-seek semantics: each call starts from the cursor's *current*
/// position, so the order of these assertions is significant — do not
/// reorder them.
fn near_seek<E: Engine>(ctx: &Context, engine: &E) {
    must_put(ctx, engine, b"x", b"1");
    must_put(ctx, engine, b"z", b"2");
    let snapshot = engine.snapshot(ctx).unwrap();
    // One cursor is reused across all probes to exercise the "near" paths.
    let mut cursor = snapshot
        .iter(IterOptions::default(), ScanMode::Mixed)
        .unwrap();
    assert_near_seek(&mut cursor, b"x", (b"x", b"1"));
    assert_near_seek(&mut cursor, b"a", (b"x", b"1"));
    assert_near_reverse_seek(&mut cursor, b"z1", (b"z", b"2"));
    assert_near_reverse_seek(&mut cursor, b"x1", (b"x", b"1"));
    assert_near_seek(&mut cursor, b"y", (b"z", b"2"));
    assert_near_seek(&mut cursor, b"x\x00", (b"z", b"2"));
    // Past the last key: near_seek reports "not found".
    let mut statistics = CfStatistics::default();
    assert!(!cursor
        .near_seek(&Key::from_raw(b"z\x00"), &mut statistics)
        .unwrap());
    // Clean up for the next scenario.
    must_delete(ctx, engine, b"x");
    must_delete(ctx, engine, b"z");
}
fn cf<E: Engine>(ctx: &Context, engine: &E) {
assert_none_cf(ctx, engine, "default", b"key");
must_put_cf(ctx, engine, "default", b"key", b"value");
assert_has_cf(ctx, engine, "default", b"key", b"value");
assert_seek_cf(ctx, engine, "default", b"k", (b"key", b"value"));
must_delete_cf(ctx, engine, "default", b"key");
assert_none_cf(ctx, engine, "default", b"key");
}
/// A write batch with no modifications must be rejected by the engine.
fn empty_write<E: Engine>(ctx: &Context, engine: &E) {
    let res = engine.write(ctx, vec![]);
    assert!(res.is_err());
}
/// A request carrying a region id the store does not host must fail.
fn wrong_context<E: Engine>(ctx: &Context, engine: &E) {
    let mut bad_ctx = ctx.to_owned();
    // Point the cloned context at a (presumably) nonexistent region.
    bad_ctx.set_region_id(ctx.get_region_id() + 1);
    assert!(engine.write(&bad_ctx, vec![]).is_err());
}
| 32.923077 | 100 | 0.616889 |
e5945517eb0b2697735d467e809a42763424f62f | 49,671 | pub type fflags_t = u32;
// FreeBSD primitive type aliases (sizes per the FreeBSD system headers).
pub type clock_t = i32;
pub type lwpid_t = i32;
pub type blksize_t = i32;
pub type clockid_t = ::c_int;
// Opaque semaphore handle — see the `_sem` struct below.
pub type sem_t = _sem;
pub type fsblkcnt_t = u64;
pub type fsfilcnt_t = u64;
pub type idtype_t = ::c_uint;
// SysV IPC types.
pub type key_t = ::c_long;
pub type msglen_t = ::c_ulong;
pub type msgqnum_t = ::c_ulong;
// POSIX message queues and spawn use opaque pointer handles on FreeBSD.
pub type mqd_t = *mut ::c_void;
pub type posix_spawnattr_t = *mut ::c_void;
pub type posix_spawn_file_actions_t = *mut ::c_void;
// C-compatible struct definitions. The `s!` macro applies #[repr(C)] and the
// standard derives; field order and padding must match the FreeBSD headers
// exactly — do not reorder or retype fields.
s! {
    pub struct aiocb {
        pub aio_fildes: ::c_int,
        pub aio_offset: ::off_t,
        pub aio_buf: *mut ::c_void,
        pub aio_nbytes: ::size_t,
        __unused1: [::c_int; 2],
        __unused2: *mut ::c_void,
        pub aio_lio_opcode: ::c_int,
        pub aio_reqprio: ::c_int,
        // unused 3 through 5 are the __aiocb_private structure
        __unused3: ::c_long,
        __unused4: ::c_long,
        __unused5: *mut ::c_void,
        pub aio_sigevent: sigevent
    }
    // Argument to the (legacy) jail(2) syscall.
    pub struct jail {
        pub version: u32,
        pub path: *mut ::c_char,
        pub hostname: *mut ::c_char,
        pub jailname: *mut ::c_char,
        pub ip4s: ::c_uint,
        pub ip6s: ::c_uint,
        pub ip4: *mut ::in_addr,
        pub ip6: *mut ::in6_addr,
    }
    pub struct sigevent {
        pub sigev_notify: ::c_int,
        pub sigev_signo: ::c_int,
        pub sigev_value: ::sigval,
        //The rest of the structure is actually a union. We expose only
        //sigev_notify_thread_id because it's the most useful union member.
        pub sigev_notify_thread_id: ::lwpid_t,
        #[cfg(target_pointer_width = "64")]
        __unused1: ::c_int,
        __unused2: [::c_long; 7]
    }
    pub struct statvfs {
        pub f_bavail: ::fsblkcnt_t,
        pub f_bfree: ::fsblkcnt_t,
        pub f_blocks: ::fsblkcnt_t,
        pub f_favail: ::fsfilcnt_t,
        pub f_ffree: ::fsfilcnt_t,
        pub f_files: ::fsfilcnt_t,
        pub f_bsize: ::c_ulong,
        pub f_flag: ::c_ulong,
        pub f_frsize: ::c_ulong,
        pub f_fsid: ::c_ulong,
        pub f_namemax: ::c_ulong,
    }
    // internal structure has changed over time
    pub struct _sem {
        data: [u32; 4],
    }
    // SysV IPC permission record.
    pub struct ipc_perm {
        pub cuid: ::uid_t,
        pub cgid: ::gid_t,
        pub uid: ::uid_t,
        pub gid: ::gid_t,
        pub mode: ::mode_t,
        pub seq: ::c_ushort,
        pub key: ::key_t,
    }
    pub struct msqid_ds {
        pub msg_perm: ::ipc_perm,
        __unused1: *mut ::c_void,
        __unused2: *mut ::c_void,
        pub msg_cbytes: ::msglen_t,
        pub msg_qnum: ::msgqnum_t,
        pub msg_qbytes: ::msglen_t,
        pub msg_lspid: ::pid_t,
        pub msg_lrpid: ::pid_t,
        pub msg_stime: ::time_t,
        pub msg_rtime: ::time_t,
        pub msg_ctime: ::time_t,
    }
    // Peer credentials as returned by LOCAL_PEERCRED.
    pub struct xucred {
        pub cr_version: ::c_uint,
        pub cr_uid: ::uid_t,
        pub cr_ngroups: ::c_short,
        pub cr_groups: [::gid_t;16],
        __cr_unused1: *mut ::c_void,
    }
    // Alternate signal stack descriptor (sigaltstack(2)).
    pub struct stack_t {
        pub ss_sp: *mut ::c_void,
        pub ss_size: ::size_t,
        pub ss_flags: ::c_int,
    }
    // Per-message record for sendmmsg/recvmmsg.
    pub struct mmsghdr {
        pub msg_hdr: ::msghdr,
        pub msg_len: ::ssize_t,
    }
}
// Structs containing arrays too large for the std derives; their trait impls
// are hand-written below behind the `extra_traits` feature.
s_no_extra_traits! {
    // Login record (see getutxent(3)).
    pub struct utmpx {
        pub ut_type: ::c_short,
        pub ut_tv: ::timeval,
        pub ut_id: [::c_char; 8],
        pub ut_pid: ::pid_t,
        pub ut_user: [::c_char; 32],
        pub ut_line: [::c_char; 16],
        pub ut_host: [::c_char; 128],
        pub __ut_spare: [::c_char; 64],
    }
    // Link-level sockaddr (AF_LINK).
    pub struct sockaddr_dl {
        pub sdl_len: ::c_uchar,
        pub sdl_family: ::c_uchar,
        pub sdl_index: ::c_ushort,
        pub sdl_type: ::c_uchar,
        pub sdl_nlen: ::c_uchar,
        pub sdl_alen: ::c_uchar,
        pub sdl_slen: ::c_uchar,
        pub sdl_data: [::c_char; 46],
    }
    // POSIX message queue attributes (mq_getattr(2)).
    pub struct mq_attr {
        pub mq_flags: ::c_long,
        pub mq_maxmsg: ::c_long,
        pub mq_msgsize: ::c_long,
        pub mq_curmsgs: ::c_long,
        __reserved: [::c_long; 4]
    }
}
// Hand-written PartialEq/Eq/Debug/Hash for the types above whose large array
// fields preclude the std derives. Note: Debug intentionally skips the big
// array fields (see the FIXME markers) while PartialEq and Hash cover every
// field, and mq_attr deliberately ignores its __reserved padding.
cfg_if! {
    if #[cfg(feature = "extra_traits")] {
        impl PartialEq for utmpx {
            fn eq(&self, other: &utmpx) -> bool {
                self.ut_type == other.ut_type
                    && self.ut_tv == other.ut_tv
                    && self.ut_id == other.ut_id
                    && self.ut_pid == other.ut_pid
                    && self.ut_user == other.ut_user
                    && self.ut_line == other.ut_line
                    && self
                    .ut_host
                    .iter()
                    .zip(other.ut_host.iter())
                    .all(|(a,b)| a == b)
                    && self
                    .__ut_spare
                    .iter()
                    .zip(other.__ut_spare.iter())
                    .all(|(a,b)| a == b)
            }
        }
        impl Eq for utmpx {}
        impl ::fmt::Debug for utmpx {
            fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
                f.debug_struct("utmpx")
                    .field("ut_type", &self.ut_type)
                    .field("ut_tv", &self.ut_tv)
                    .field("ut_id", &self.ut_id)
                    .field("ut_pid", &self.ut_pid)
                    .field("ut_user", &self.ut_user)
                    .field("ut_line", &self.ut_line)
                    // FIXME: .field("ut_host", &self.ut_host)
                    // FIXME: .field("__ut_spare", &self.__ut_spare)
                    .finish()
            }
        }
        impl ::hash::Hash for utmpx {
            fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
                self.ut_type.hash(state);
                self.ut_tv.hash(state);
                self.ut_id.hash(state);
                self.ut_pid.hash(state);
                self.ut_user.hash(state);
                self.ut_line.hash(state);
                self.ut_host.hash(state);
                self.__ut_spare.hash(state);
            }
        }
        impl PartialEq for sockaddr_dl {
            fn eq(&self, other: &sockaddr_dl) -> bool {
                self.sdl_len == other.sdl_len
                    && self.sdl_family == other.sdl_family
                    && self.sdl_index == other.sdl_index
                    && self.sdl_type == other.sdl_type
                    && self.sdl_nlen == other.sdl_nlen
                    && self.sdl_alen == other.sdl_alen
                    && self.sdl_slen == other.sdl_slen
                    && self
                    .sdl_data
                    .iter()
                    .zip(other.sdl_data.iter())
                    .all(|(a,b)| a == b)
            }
        }
        impl Eq for sockaddr_dl {}
        impl ::fmt::Debug for sockaddr_dl {
            fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
                f.debug_struct("sockaddr_dl")
                    .field("sdl_len", &self.sdl_len)
                    .field("sdl_family", &self.sdl_family)
                    .field("sdl_index", &self.sdl_index)
                    .field("sdl_type", &self.sdl_type)
                    .field("sdl_nlen", &self.sdl_nlen)
                    .field("sdl_alen", &self.sdl_alen)
                    .field("sdl_slen", &self.sdl_slen)
                    // FIXME: .field("sdl_data", &self.sdl_data)
                    .finish()
            }
        }
        impl ::hash::Hash for sockaddr_dl {
            fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
                self.sdl_len.hash(state);
                self.sdl_family.hash(state);
                self.sdl_index.hash(state);
                self.sdl_type.hash(state);
                self.sdl_nlen.hash(state);
                self.sdl_alen.hash(state);
                self.sdl_slen.hash(state);
                self.sdl_data.hash(state);
            }
        }
        impl PartialEq for mq_attr {
            fn eq(&self, other: &mq_attr) -> bool {
                self.mq_flags == other.mq_flags &&
                self.mq_maxmsg == other.mq_maxmsg &&
                self.mq_msgsize == other.mq_msgsize &&
                self.mq_curmsgs == other.mq_curmsgs
            }
        }
        impl Eq for mq_attr {}
        impl ::fmt::Debug for mq_attr {
            fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
                f.debug_struct("mq_attr")
                    .field("mq_flags", &self.mq_flags)
                    .field("mq_maxmsg", &self.mq_maxmsg)
                    .field("mq_msgsize", &self.mq_msgsize)
                    .field("mq_curmsgs", &self.mq_curmsgs)
                    .finish()
            }
        }
        impl ::hash::Hash for mq_attr {
            fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
                self.mq_flags.hash(state);
                self.mq_maxmsg.hash(state);
                self.mq_msgsize.hash(state);
                self.mq_curmsgs.hash(state);
            }
        }
    }
}
pub const SIGEV_THREAD_ID: ::c_int = 4;
pub const EXTATTR_NAMESPACE_EMPTY: ::c_int = 0;
pub const EXTATTR_NAMESPACE_USER: ::c_int = 1;
pub const EXTATTR_NAMESPACE_SYSTEM: ::c_int = 2;
pub const RAND_MAX: ::c_int = 0x7fff_fffd;
pub const PTHREAD_STACK_MIN: ::size_t = 2048;
pub const PTHREAD_MUTEX_ADAPTIVE_NP: ::c_int = 4;
pub const SIGSTKSZ: ::size_t = 34816;
pub const SF_NODISKIO: ::c_int = 0x00000001;
pub const SF_MNOWAIT: ::c_int = 0x00000002;
pub const SF_SYNC: ::c_int = 0x00000004;
pub const SF_USER_READAHEAD: ::c_int = 0x00000008;
pub const SF_NOCACHE: ::c_int = 0x00000010;
pub const O_CLOEXEC: ::c_int = 0x00100000;
pub const O_DIRECTORY: ::c_int = 0x00020000;
pub const O_EXEC: ::c_int = 0x00040000;
pub const O_TTY_INIT: ::c_int = 0x00080000;
pub const F_GETLK: ::c_int = 11;
pub const F_SETLK: ::c_int = 12;
pub const F_SETLKW: ::c_int = 13;
pub const ENOTCAPABLE: ::c_int = 93;
pub const ECAPMODE: ::c_int = 94;
pub const ENOTRECOVERABLE: ::c_int = 95;
pub const EOWNERDEAD: ::c_int = 96;
pub const ELAST: ::c_int = 96;
pub const RLIMIT_NPTS: ::c_int = 11;
pub const RLIMIT_SWAP: ::c_int = 12;
pub const RLIMIT_KQUEUES: ::c_int = 13;
pub const RLIMIT_UMTXP: ::c_int = 14;
pub const RLIM_NLIMITS: ::rlim_t = 15;
pub const Q_GETQUOTA: ::c_int = 0x700;
pub const Q_SETQUOTA: ::c_int = 0x800;
pub const POSIX_FADV_NORMAL: ::c_int = 0;
pub const POSIX_FADV_RANDOM: ::c_int = 1;
pub const POSIX_FADV_SEQUENTIAL: ::c_int = 2;
pub const POSIX_FADV_WILLNEED: ::c_int = 3;
pub const POSIX_FADV_DONTNEED: ::c_int = 4;
pub const POSIX_FADV_NOREUSE: ::c_int = 5;
pub const POLLINIGNEOF: ::c_short = 0x2000;
pub const EVFILT_READ: i16 = -1;
pub const EVFILT_WRITE: i16 = -2;
pub const EVFILT_AIO: i16 = -3;
pub const EVFILT_VNODE: i16 = -4;
pub const EVFILT_PROC: i16 = -5;
pub const EVFILT_SIGNAL: i16 = -6;
pub const EVFILT_TIMER: i16 = -7;
pub const EVFILT_PROCDESC: i16 = -8;
pub const EVFILT_FS: i16 = -9;
pub const EVFILT_LIO: i16 = -10;
pub const EVFILT_USER: i16 = -11;
pub const EVFILT_SENDFILE: i16 = -12;
pub const EVFILT_EMPTY: i16 = -13;
pub const EV_ADD: u16 = 0x1;
pub const EV_DELETE: u16 = 0x2;
pub const EV_ENABLE: u16 = 0x4;
pub const EV_DISABLE: u16 = 0x8;
pub const EV_ONESHOT: u16 = 0x10;
pub const EV_CLEAR: u16 = 0x20;
pub const EV_RECEIPT: u16 = 0x40;
pub const EV_DISPATCH: u16 = 0x80;
pub const EV_DROP: u16 = 0x1000;
pub const EV_FLAG1: u16 = 0x2000;
pub const EV_ERROR: u16 = 0x4000;
pub const EV_EOF: u16 = 0x8000;
pub const EV_SYSFLAGS: u16 = 0xf000;
pub const NOTE_TRIGGER: u32 = 0x01000000;
pub const NOTE_FFNOP: u32 = 0x00000000;
pub const NOTE_FFAND: u32 = 0x40000000;
pub const NOTE_FFOR: u32 = 0x80000000;
pub const NOTE_FFCOPY: u32 = 0xc0000000;
pub const NOTE_FFCTRLMASK: u32 = 0xc0000000;
pub const NOTE_FFLAGSMASK: u32 = 0x00ffffff;
pub const NOTE_LOWAT: u32 = 0x00000001;
pub const NOTE_DELETE: u32 = 0x00000001;
pub const NOTE_WRITE: u32 = 0x00000002;
pub const NOTE_EXTEND: u32 = 0x00000004;
pub const NOTE_ATTRIB: u32 = 0x00000008;
pub const NOTE_LINK: u32 = 0x00000010;
pub const NOTE_RENAME: u32 = 0x00000020;
pub const NOTE_REVOKE: u32 = 0x00000040;
pub const NOTE_EXIT: u32 = 0x80000000;
pub const NOTE_FORK: u32 = 0x40000000;
pub const NOTE_EXEC: u32 = 0x20000000;
pub const NOTE_PDATAMASK: u32 = 0x000fffff;
pub const NOTE_PCTRLMASK: u32 = 0xf0000000;
pub const NOTE_TRACK: u32 = 0x00000001;
pub const NOTE_TRACKERR: u32 = 0x00000002;
pub const NOTE_CHILD: u32 = 0x00000004;
pub const NOTE_SECONDS: u32 = 0x00000001;
pub const NOTE_MSECONDS: u32 = 0x00000002;
pub const NOTE_USECONDS: u32 = 0x00000004;
pub const NOTE_NSECONDS: u32 = 0x00000008;
pub const MADV_PROTECT: ::c_int = 10;
pub const RUSAGE_THREAD: ::c_int = 1;
pub const CLOCK_REALTIME: ::clockid_t = 0;
pub const CLOCK_VIRTUAL: ::clockid_t = 1;
pub const CLOCK_PROF: ::clockid_t = 2;
pub const CLOCK_MONOTONIC: ::clockid_t = 4;
pub const CLOCK_UPTIME: ::clockid_t = 5;
pub const CLOCK_UPTIME_PRECISE: ::clockid_t = 7;
pub const CLOCK_UPTIME_FAST: ::clockid_t = 8;
pub const CLOCK_REALTIME_PRECISE: ::clockid_t = 9;
pub const CLOCK_REALTIME_FAST: ::clockid_t = 10;
pub const CLOCK_MONOTONIC_PRECISE: ::clockid_t = 11;
pub const CLOCK_MONOTONIC_FAST: ::clockid_t = 12;
pub const CLOCK_SECOND: ::clockid_t = 13;
pub const CLOCK_THREAD_CPUTIME_ID: ::clockid_t = 14;
pub const CLOCK_PROCESS_CPUTIME_ID: ::clockid_t = 15;
pub const CTL_UNSPEC: ::c_int = 0;
pub const CTL_KERN: ::c_int = 1;
pub const CTL_VM: ::c_int = 2;
pub const CTL_VFS: ::c_int = 3;
pub const CTL_NET: ::c_int = 4;
pub const CTL_DEBUG: ::c_int = 5;
pub const CTL_HW: ::c_int = 6;
pub const CTL_MACHDEP: ::c_int = 7;
pub const CTL_USER: ::c_int = 8;
pub const CTL_P1003_1B: ::c_int = 9;
pub const KERN_OSTYPE: ::c_int = 1;
pub const KERN_OSRELEASE: ::c_int = 2;
pub const KERN_OSREV: ::c_int = 3;
pub const KERN_VERSION: ::c_int = 4;
pub const KERN_MAXVNODES: ::c_int = 5;
pub const KERN_MAXPROC: ::c_int = 6;
pub const KERN_MAXFILES: ::c_int = 7;
pub const KERN_ARGMAX: ::c_int = 8;
pub const KERN_SECURELVL: ::c_int = 9;
pub const KERN_HOSTNAME: ::c_int = 10;
pub const KERN_HOSTID: ::c_int = 11;
pub const KERN_CLOCKRATE: ::c_int = 12;
pub const KERN_VNODE: ::c_int = 13;
pub const KERN_PROC: ::c_int = 14;
pub const KERN_FILE: ::c_int = 15;
pub const KERN_PROF: ::c_int = 16;
pub const KERN_POSIX1: ::c_int = 17;
pub const KERN_NGROUPS: ::c_int = 18;
pub const KERN_JOB_CONTROL: ::c_int = 19;
pub const KERN_SAVED_IDS: ::c_int = 20;
pub const KERN_BOOTTIME: ::c_int = 21;
pub const KERN_NISDOMAINNAME: ::c_int = 22;
pub const KERN_UPDATEINTERVAL: ::c_int = 23;
pub const KERN_OSRELDATE: ::c_int = 24;
pub const KERN_NTP_PLL: ::c_int = 25;
pub const KERN_BOOTFILE: ::c_int = 26;
pub const KERN_MAXFILESPERPROC: ::c_int = 27;
pub const KERN_MAXPROCPERUID: ::c_int = 28;
pub const KERN_DUMPDEV: ::c_int = 29;
pub const KERN_IPC: ::c_int = 30;
pub const KERN_DUMMY: ::c_int = 31;
pub const KERN_PS_STRINGS: ::c_int = 32;
pub const KERN_USRSTACK: ::c_int = 33;
pub const KERN_LOGSIGEXIT: ::c_int = 34;
pub const KERN_IOV_MAX: ::c_int = 35;
pub const KERN_HOSTUUID: ::c_int = 36;
pub const KERN_ARND: ::c_int = 37;
pub const KERN_PROC_ALL: ::c_int = 0;
pub const KERN_PROC_PID: ::c_int = 1;
pub const KERN_PROC_PGRP: ::c_int = 2;
pub const KERN_PROC_SESSION: ::c_int = 3;
pub const KERN_PROC_TTY: ::c_int = 4;
pub const KERN_PROC_UID: ::c_int = 5;
pub const KERN_PROC_RUID: ::c_int = 6;
pub const KERN_PROC_ARGS: ::c_int = 7;
pub const KERN_PROC_PROC: ::c_int = 8;
pub const KERN_PROC_SV_NAME: ::c_int = 9;
pub const KERN_PROC_RGID: ::c_int = 10;
pub const KERN_PROC_GID: ::c_int = 11;
pub const KERN_PROC_PATHNAME: ::c_int = 12;
pub const KERN_PROC_OVMMAP: ::c_int = 13;
pub const KERN_PROC_OFILEDESC: ::c_int = 14;
pub const KERN_PROC_KSTACK: ::c_int = 15;
pub const KERN_PROC_INC_THREAD: ::c_int = 0x10;
pub const KERN_PROC_VMMAP: ::c_int = 32;
pub const KERN_PROC_FILEDESC: ::c_int = 33;
pub const KERN_PROC_GROUPS: ::c_int = 34;
pub const KERN_PROC_ENV: ::c_int = 35;
pub const KERN_PROC_AUXV: ::c_int = 36;
pub const KERN_PROC_RLIMIT: ::c_int = 37;
pub const KERN_PROC_PS_STRINGS: ::c_int = 38;
pub const KERN_PROC_UMASK: ::c_int = 39;
pub const KERN_PROC_OSREL: ::c_int = 40;
pub const KERN_PROC_SIGTRAMP: ::c_int = 41;
pub const KIPC_MAXSOCKBUF: ::c_int = 1;
// kern.ipc.* second-level sysctl identifiers.
pub const KIPC_SOCKBUF_WASTE: ::c_int = 2;
pub const KIPC_SOMAXCONN: ::c_int = 3;
pub const KIPC_MAX_LINKHDR: ::c_int = 4;
pub const KIPC_MAX_PROTOHDR: ::c_int = 5;
pub const KIPC_MAX_HDR: ::c_int = 6;
pub const KIPC_MAX_DATALEN: ::c_int = 7;
// hw.* sysctl identifiers (hardware description).
pub const HW_MACHINE: ::c_int = 1;
pub const HW_MODEL: ::c_int = 2;
pub const HW_NCPU: ::c_int = 3;
pub const HW_BYTEORDER: ::c_int = 4;
pub const HW_PHYSMEM: ::c_int = 5;
pub const HW_USERMEM: ::c_int = 6;
pub const HW_PAGESIZE: ::c_int = 7;
pub const HW_DISKNAMES: ::c_int = 8;
pub const HW_DISKSTATS: ::c_int = 9;
pub const HW_FLOATINGPT: ::c_int = 10;
pub const HW_MACHINE_ARCH: ::c_int = 11;
pub const HW_REALMEM: ::c_int = 12;
// user.* sysctl identifiers (POSIX.2 user-level limits and options).
pub const USER_CS_PATH: ::c_int = 1;
pub const USER_BC_BASE_MAX: ::c_int = 2;
pub const USER_BC_DIM_MAX: ::c_int = 3;
pub const USER_BC_SCALE_MAX: ::c_int = 4;
pub const USER_BC_STRING_MAX: ::c_int = 5;
pub const USER_COLL_WEIGHTS_MAX: ::c_int = 6;
pub const USER_EXPR_NEST_MAX: ::c_int = 7;
pub const USER_LINE_MAX: ::c_int = 8;
pub const USER_RE_DUP_MAX: ::c_int = 9;
pub const USER_POSIX2_VERSION: ::c_int = 10;
pub const USER_POSIX2_C_BIND: ::c_int = 11;
pub const USER_POSIX2_C_DEV: ::c_int = 12;
pub const USER_POSIX2_CHAR_TERM: ::c_int = 13;
pub const USER_POSIX2_FORT_DEV: ::c_int = 14;
pub const USER_POSIX2_FORT_RUN: ::c_int = 15;
pub const USER_POSIX2_LOCALEDEF: ::c_int = 16;
pub const USER_POSIX2_SW_DEV: ::c_int = 17;
pub const USER_POSIX2_UPE: ::c_int = 18;
pub const USER_STREAM_MAX: ::c_int = 19;
pub const USER_TZNAME_MAX: ::c_int = 20;
// p1003_1b.* sysctl identifiers (POSIX.1b realtime extension options
// and limits).
pub const CTL_P1003_1B_ASYNCHRONOUS_IO: ::c_int = 1;
pub const CTL_P1003_1B_MAPPED_FILES: ::c_int = 2;
pub const CTL_P1003_1B_MEMLOCK: ::c_int = 3;
pub const CTL_P1003_1B_MEMLOCK_RANGE: ::c_int = 4;
pub const CTL_P1003_1B_MEMORY_PROTECTION: ::c_int = 5;
pub const CTL_P1003_1B_MESSAGE_PASSING: ::c_int = 6;
pub const CTL_P1003_1B_PRIORITIZED_IO: ::c_int = 7;
pub const CTL_P1003_1B_PRIORITY_SCHEDULING: ::c_int = 8;
pub const CTL_P1003_1B_REALTIME_SIGNALS: ::c_int = 9;
pub const CTL_P1003_1B_SEMAPHORES: ::c_int = 10;
pub const CTL_P1003_1B_FSYNC: ::c_int = 11;
pub const CTL_P1003_1B_SHARED_MEMORY_OBJECTS: ::c_int = 12;
pub const CTL_P1003_1B_SYNCHRONIZED_IO: ::c_int = 13;
pub const CTL_P1003_1B_TIMERS: ::c_int = 14;
pub const CTL_P1003_1B_AIO_LISTIO_MAX: ::c_int = 15;
pub const CTL_P1003_1B_AIO_MAX: ::c_int = 16;
pub const CTL_P1003_1B_AIO_PRIO_DELTA_MAX: ::c_int = 17;
pub const CTL_P1003_1B_DELAYTIMER_MAX: ::c_int = 18;
pub const CTL_P1003_1B_MQ_OPEN_MAX: ::c_int = 19;
pub const CTL_P1003_1B_PAGESIZE: ::c_int = 20;
pub const CTL_P1003_1B_RTSIG_MAX: ::c_int = 21;
pub const CTL_P1003_1B_SEM_NSEMS_MAX: ::c_int = 22;
pub const CTL_P1003_1B_SEM_VALUE_MAX: ::c_int = 23;
pub const CTL_P1003_1B_SIGQUEUE_MAX: ::c_int = 24;
pub const CTL_P1003_1B_TIMER_MAX: ::c_int = 25;
// Terminal/pseudo-terminal ioctl request codes.
pub const TIOCGPTN: ::c_uint = 0x4004740f;
pub const TIOCPTMASTER: ::c_uint = 0x2000741c;
pub const TIOCSIG: ::c_uint = 0x2004745f;
// Modem-line status bit: data carrier detect.
pub const TIOCM_DCD: ::c_int = 0x40;
pub const H4DISC: ::c_int = 0x7;
// Generic file-descriptor ioctl request codes (FIO*).
pub const FIONCLEX: ::c_ulong = 0x20006602;
pub const FIONREAD: ::c_ulong = 0x4004667f;
pub const FIOASYNC: ::c_ulong = 0x8004667d;
pub const FIOSETOWN: ::c_ulong = 0x8004667c;
pub const FIOGETOWN: ::c_ulong = 0x4004667b;
pub const FIODTYPE: ::c_ulong = 0x4004667a;
pub const FIOGETLBA: ::c_ulong = 0x40046679;
pub const FIODGNAME: ::c_ulong = 0x80106678;
pub const FIONWRITE: ::c_ulong = 0x40046677;
pub const FIONSPACE: ::c_ulong = 0x40046676;
pub const FIOSEEKDATA: ::c_ulong = 0xc0086661;
pub const FIOSEEKHOLE: ::c_ulong = 0xc0086662;
// jail(2) API version and flags for jail_set(2)/jail_get(2).
pub const JAIL_API_VERSION: u32 = 2;
pub const JAIL_CREATE: ::c_int = 0x01;
pub const JAIL_UPDATE: ::c_int = 0x02;
pub const JAIL_ATTACH: ::c_int = 0x04;
pub const JAIL_DYING: ::c_int = 0x08;
// Masks of the flags accepted by jail_set(2) and jail_get(2) respectively.
pub const JAIL_SET_MASK: ::c_int = 0x0f;
pub const JAIL_GET_MASK: ::c_int = 0x08;
// JAIL_SYS_* subsystem mode values.
pub const JAIL_SYS_DISABLE: ::c_int = 0;
pub const JAIL_SYS_NEW: ::c_int = 1;
pub const JAIL_SYS_INHERIT: ::c_int = 2;
// FreeBSD-specific SO_* socket options.
pub const SO_BINTIME: ::c_int = 0x2000;
pub const SO_NO_OFFLOAD: ::c_int = 0x4000;
pub const SO_NO_DDP: ::c_int = 0x8000;
pub const SO_REUSEPORT_LB: ::c_int = 0x10000;
pub const SO_LABEL: ::c_int = 0x1009;
pub const SO_PEERLABEL: ::c_int = 0x1010;
pub const SO_LISTENQLIMIT: ::c_int = 0x1011;
pub const SO_LISTENQLEN: ::c_int = 0x1012;
pub const SO_LISTENINCQLEN: ::c_int = 0x1013;
pub const SO_SETFIB: ::c_int = 0x1014;
pub const SO_USER_COOKIE: ::c_int = 0x1015;
pub const SO_PROTOCOL: ::c_int = 0x1016;
// Alias kept for source compatibility with other platforms.
pub const SO_PROTOTYPE: ::c_int = SO_PROTOCOL;
pub const SO_VENDOR: ::c_int = 0x80000000;
// LOCAL_* socket options.
pub const LOCAL_PEERCRED: ::c_int = 1;
pub const LOCAL_CREDS: ::c_int = 2;
pub const LOCAL_CONNWAIT: ::c_int = 4;
pub const LOCAL_VENDOR: ::c_int = SO_VENDOR;
// ptrace(2) request values (PT_*).
pub const PT_LWPINFO: ::c_int = 13;
pub const PT_GETNUMLWPS: ::c_int = 14;
pub const PT_GETLWPLIST: ::c_int = 15;
pub const PT_CLEARSTEP: ::c_int = 16;
pub const PT_SETSTEP: ::c_int = 17;
pub const PT_SUSPEND: ::c_int = 18;
pub const PT_RESUME: ::c_int = 19;
pub const PT_TO_SCE: ::c_int = 20;
pub const PT_TO_SCX: ::c_int = 21;
pub const PT_SYSCALL: ::c_int = 22;
pub const PT_FOLLOW_FORK: ::c_int = 23;
pub const PT_LWP_EVENTS: ::c_int = 24;
pub const PT_GET_EVENT_MASK: ::c_int = 25;
pub const PT_SET_EVENT_MASK: ::c_int = 26;
pub const PT_GETREGS: ::c_int = 33;
pub const PT_SETREGS: ::c_int = 34;
pub const PT_GETFPREGS: ::c_int = 35;
pub const PT_SETFPREGS: ::c_int = 36;
pub const PT_GETDBREGS: ::c_int = 37;
pub const PT_SETDBREGS: ::c_int = 38;
pub const PT_VM_TIMESTAMP: ::c_int = 40;
pub const PT_VM_ENTRY: ::c_int = 41;
// Machine-dependent requests start here.
pub const PT_FIRSTMACH: ::c_int = 64;
// ptrace event-mask bits (used with PT_GET_EVENT_MASK/PT_SET_EVENT_MASK).
pub const PTRACE_EXEC: ::c_int = 0x0001;
pub const PTRACE_SCE: ::c_int = 0x0002;
pub const PTRACE_SCX: ::c_int = 0x0004;
pub const PTRACE_SYSCALL: ::c_int = PTRACE_SCE | PTRACE_SCX;
pub const PTRACE_FORK: ::c_int = 0x0008;
pub const PTRACE_LWP: ::c_int = 0x0010;
pub const PTRACE_VFORK: ::c_int = 0x0020;
pub const PTRACE_DEFAULT: ::c_int = PTRACE_EXEC;
// Additional address families beyond those in the shared unix module.
pub const AF_SLOW: ::c_int = 33;
pub const AF_SCLUSTER: ::c_int = 34;
pub const AF_ARP: ::c_int = 35;
pub const AF_BLUETOOTH: ::c_int = 36;
pub const AF_IEEE80211: ::c_int = 37;
pub const AF_INET_SDP: ::c_int = 40;
pub const AF_INET6_SDP: ::c_int = 42;
// Deprecated: highest address-family number; kept only for compatibility.
#[doc(hidden)]
#[deprecated(
    since = "0.2.55",
    note = "If you are using this report to: \
            https://github.com/rust-lang/libc/issues/665"
)]
pub const AF_MAX: ::c_int = 42;
// Interface flags, from sys/net/if.h:
// https://github.com/freebsd/freebsd/blob/master/sys/net/if.h#L140
// Legend used upstream: (i) set by interface, (n) set by user/protocol,
// (d) driver-internal.
pub const IFF_UP: ::c_int = 0x1; // (n) interface is up
pub const IFF_BROADCAST: ::c_int = 0x2; // (i) broadcast address valid
pub const IFF_DEBUG: ::c_int = 0x4; // (n) turn on debugging
pub const IFF_LOOPBACK: ::c_int = 0x8; // (i) is a loopback net
pub const IFF_POINTOPOINT: ::c_int = 0x10; // (i) is a point-to-point link
// 0x20 was IFF_SMART
pub const IFF_RUNNING: ::c_int = 0x40; // (d) resources allocated
#[doc(hidden)]
#[deprecated(
    since="0.2.54",
    note="IFF_DRV_RUNNING is deprecated. Use the portable IFF_RUNNING instead"
)]
pub const IFF_DRV_RUNNING: ::c_int = 0x40;
pub const IFF_NOARP: ::c_int = 0x80; // (n) no address resolution protocol
pub const IFF_PROMISC: ::c_int = 0x100; // (n) receive all packets
pub const IFF_ALLMULTI: ::c_int = 0x200; // (n) receive all multicast packets
pub const IFF_OACTIVE: ::c_int = 0x400; // (d) tx hardware queue is full
#[doc(hidden)]
#[deprecated(
    since = "0.2.54",
    note = "Use the portable `IFF_OACTIVE` instead",
)]
pub const IFF_DRV_OACTIVE: ::c_int = 0x400;
pub const IFF_SIMPLEX: ::c_int = 0x800; // (i) can't hear own transmissions
pub const IFF_LINK0: ::c_int = 0x1000; // per link layer defined bit
pub const IFF_LINK1: ::c_int = 0x2000; // per link layer defined bit
pub const IFF_LINK2: ::c_int = 0x4000; // per link layer defined bit
pub const IFF_ALTPHYS: ::c_int = IFF_LINK2; // use alternate physical connection
pub const IFF_MULTICAST: ::c_int = 0x8000; // (i) supports multicast
// (i) unconfigurable using ioctl(2)
pub const IFF_CANTCONFIG: ::c_int = 0x10000;
pub const IFF_PPROMISC: ::c_int = 0x20000; // (n) user-requested promisc mode
pub const IFF_MONITOR: ::c_int = 0x40000; // (n) user-requested monitor mode
pub const IFF_STATICARP: ::c_int = 0x80000; // (n) static ARP
pub const IFF_DYING: ::c_int = 0x200000; // (n) interface is winding down
pub const IFF_RENAMING: ::c_int = 0x400000; // (n) interface is being renamed
// sys/netinet/in.h
// Protocols (RFC 1700)
// NOTE: These are in addition to the constants defined in src/unix/mod.rs
// Each value is an IP protocol number as carried in the IP header's
// protocol field, except the pseudo-protocols noted near the end.
// IPPROTO_IP defined in src/unix/mod.rs
/// IP6 hop-by-hop options
pub const IPPROTO_HOPOPTS: ::c_int = 0;
// IPPROTO_ICMP defined in src/unix/mod.rs
/// group mgmt protocol
pub const IPPROTO_IGMP: ::c_int = 2;
/// gateway^2 (deprecated)
pub const IPPROTO_GGP: ::c_int = 3;
/// for compatibility
pub const IPPROTO_IPIP: ::c_int = 4;
// IPPROTO_TCP defined in src/unix/mod.rs
/// Stream protocol II.
pub const IPPROTO_ST: ::c_int = 7;
/// exterior gateway protocol
pub const IPPROTO_EGP: ::c_int = 8;
/// private interior gateway
pub const IPPROTO_PIGP: ::c_int = 9;
/// BBN RCC Monitoring
pub const IPPROTO_RCCMON: ::c_int = 10;
/// network voice protocol
pub const IPPROTO_NVPII: ::c_int = 11;
/// pup
pub const IPPROTO_PUP: ::c_int = 12;
/// Argus
pub const IPPROTO_ARGUS: ::c_int = 13;
/// EMCON
pub const IPPROTO_EMCON: ::c_int = 14;
/// Cross Net Debugger
pub const IPPROTO_XNET: ::c_int = 15;
/// Chaos
pub const IPPROTO_CHAOS: ::c_int = 16;
// IPPROTO_UDP defined in src/unix/mod.rs
/// Multiplexing
pub const IPPROTO_MUX: ::c_int = 18;
/// DCN Measurement Subsystems
pub const IPPROTO_MEAS: ::c_int = 19;
/// Host Monitoring
pub const IPPROTO_HMP: ::c_int = 20;
/// Packet Radio Measurement
pub const IPPROTO_PRM: ::c_int = 21;
/// xns idp
pub const IPPROTO_IDP: ::c_int = 22;
/// Trunk-1
pub const IPPROTO_TRUNK1: ::c_int = 23;
/// Trunk-2
pub const IPPROTO_TRUNK2: ::c_int = 24;
/// Leaf-1
pub const IPPROTO_LEAF1: ::c_int = 25;
/// Leaf-2
pub const IPPROTO_LEAF2: ::c_int = 26;
/// Reliable Data
pub const IPPROTO_RDP: ::c_int = 27;
/// Reliable Transaction
pub const IPPROTO_IRTP: ::c_int = 28;
/// tp-4 w/ class negotiation
pub const IPPROTO_TP: ::c_int = 29;
/// Bulk Data Transfer
pub const IPPROTO_BLT: ::c_int = 30;
/// Network Services
pub const IPPROTO_NSP: ::c_int = 31;
/// Merit Internodal
pub const IPPROTO_INP: ::c_int = 32;
/// Sequential Exchange
pub const IPPROTO_SEP: ::c_int = 33;
/// Third Party Connect
pub const IPPROTO_3PC: ::c_int = 34;
/// InterDomain Policy Routing
pub const IPPROTO_IDPR: ::c_int = 35;
/// XTP
pub const IPPROTO_XTP: ::c_int = 36;
/// Datagram Delivery
pub const IPPROTO_DDP: ::c_int = 37;
/// Control Message Transport
pub const IPPROTO_CMTP: ::c_int = 38;
/// TP++ Transport
pub const IPPROTO_TPXX: ::c_int = 39;
/// IL transport protocol
pub const IPPROTO_IL: ::c_int = 40;
// IPPROTO_IPV6 defined in src/unix/mod.rs
/// Source Demand Routing
pub const IPPROTO_SDRP: ::c_int = 42;
/// IP6 routing header
pub const IPPROTO_ROUTING: ::c_int = 43;
/// IP6 fragmentation header
pub const IPPROTO_FRAGMENT: ::c_int = 44;
/// InterDomain Routing
pub const IPPROTO_IDRP: ::c_int = 45;
/// resource reservation
pub const IPPROTO_RSVP: ::c_int = 46;
/// General Routing Encap.
pub const IPPROTO_GRE: ::c_int = 47;
/// Mobile Host Routing
pub const IPPROTO_MHRP: ::c_int = 48;
/// BHA
pub const IPPROTO_BHA: ::c_int = 49;
/// IP6 Encap Sec. Payload
pub const IPPROTO_ESP: ::c_int = 50;
/// IP6 Auth Header
pub const IPPROTO_AH: ::c_int = 51;
/// Integ. Net Layer Security
pub const IPPROTO_INLSP: ::c_int = 52;
/// IP with encryption
pub const IPPROTO_SWIPE: ::c_int = 53;
/// Next Hop Resolution
pub const IPPROTO_NHRP: ::c_int = 54;
/// IP Mobility
pub const IPPROTO_MOBILE: ::c_int = 55;
/// Transport Layer Security
pub const IPPROTO_TLSP: ::c_int = 56;
/// SKIP
pub const IPPROTO_SKIP: ::c_int = 57;
// IPPROTO_ICMPV6 defined in src/unix/mod.rs
/// IP6 no next header
pub const IPPROTO_NONE: ::c_int = 59;
/// IP6 destination option
pub const IPPROTO_DSTOPTS: ::c_int = 60;
/// any host internal protocol
pub const IPPROTO_AHIP: ::c_int = 61;
/// CFTP
pub const IPPROTO_CFTP: ::c_int = 62;
/// "hello" routing protocol
pub const IPPROTO_HELLO: ::c_int = 63;
/// SATNET/Backroom EXPAK
pub const IPPROTO_SATEXPAK: ::c_int = 64;
/// Kryptolan
pub const IPPROTO_KRYPTOLAN: ::c_int = 65;
/// Remote Virtual Disk
pub const IPPROTO_RVD: ::c_int = 66;
/// Pluribus Packet Core
pub const IPPROTO_IPPC: ::c_int = 67;
/// Any distributed FS
pub const IPPROTO_ADFS: ::c_int = 68;
/// Satnet Monitoring
pub const IPPROTO_SATMON: ::c_int = 69;
/// VISA Protocol
pub const IPPROTO_VISA: ::c_int = 70;
/// Packet Core Utility
pub const IPPROTO_IPCV: ::c_int = 71;
/// Comp. Prot. Net. Executive
pub const IPPROTO_CPNX: ::c_int = 72;
/// Comp. Prot. HeartBeat
pub const IPPROTO_CPHB: ::c_int = 73;
/// Wang Span Network
pub const IPPROTO_WSN: ::c_int = 74;
/// Packet Video Protocol
pub const IPPROTO_PVP: ::c_int = 75;
/// BackRoom SATNET Monitoring
pub const IPPROTO_BRSATMON: ::c_int = 76;
/// Sun net disk proto (temp.)
pub const IPPROTO_ND: ::c_int = 77;
/// WIDEBAND Monitoring
pub const IPPROTO_WBMON: ::c_int = 78;
/// WIDEBAND EXPAK
pub const IPPROTO_WBEXPAK: ::c_int = 79;
/// ISO cnlp
pub const IPPROTO_EON: ::c_int = 80;
/// VMTP
pub const IPPROTO_VMTP: ::c_int = 81;
/// Secure VMTP
pub const IPPROTO_SVMTP: ::c_int = 82;
/// Banyon VINES
pub const IPPROTO_VINES: ::c_int = 83;
/// TTP
pub const IPPROTO_TTP: ::c_int = 84;
/// NSFNET-IGP
pub const IPPROTO_IGP: ::c_int = 85;
/// dissimilar gateway prot.
pub const IPPROTO_DGP: ::c_int = 86;
/// TCF
pub const IPPROTO_TCF: ::c_int = 87;
/// Cisco/GXS IGRP
pub const IPPROTO_IGRP: ::c_int = 88;
/// OSPFIGP
pub const IPPROTO_OSPFIGP: ::c_int = 89;
/// Strite RPC protocol
pub const IPPROTO_SRPC: ::c_int = 90;
/// Locus Address Resoloution
pub const IPPROTO_LARP: ::c_int = 91;
/// Multicast Transport
pub const IPPROTO_MTP: ::c_int = 92;
/// AX.25 Frames
pub const IPPROTO_AX25: ::c_int = 93;
/// IP encapsulated in IP
pub const IPPROTO_IPEIP: ::c_int = 94;
/// Mobile Int.ing control
pub const IPPROTO_MICP: ::c_int = 95;
/// Semaphore Comm. security
pub const IPPROTO_SCCSP: ::c_int = 96;
/// Ethernet IP encapsulation
pub const IPPROTO_ETHERIP: ::c_int = 97;
/// encapsulation header
pub const IPPROTO_ENCAP: ::c_int = 98;
/// any private encr. scheme
pub const IPPROTO_APES: ::c_int = 99;
/// GMTP
pub const IPPROTO_GMTP: ::c_int = 100;
/// payload compression (IPComp)
pub const IPPROTO_IPCOMP: ::c_int = 108;
/// SCTP
pub const IPPROTO_SCTP: ::c_int = 132;
/// IPv6 Mobility Header
pub const IPPROTO_MH: ::c_int = 135;
/// UDP-Lite
pub const IPPROTO_UDPLITE: ::c_int = 136;
/// IP6 Host Identity Protocol
pub const IPPROTO_HIP: ::c_int = 139;
/// IP6 Shim6 Protocol
pub const IPPROTO_SHIM6: ::c_int = 140;
/* 101-254: Partly Unassigned */
/// Protocol Independent Mcast
pub const IPPROTO_PIM: ::c_int = 103;
/// CARP
pub const IPPROTO_CARP: ::c_int = 112;
/// PGM
pub const IPPROTO_PGM: ::c_int = 113;
/// MPLS-in-IP
pub const IPPROTO_MPLS: ::c_int = 137;
/// PFSYNC
pub const IPPROTO_PFSYNC: ::c_int = 240;
/* 255: Reserved */
/* BSD Private, local use, namespace incursion, no longer used */
/// OLD divert pseudo-proto
pub const IPPROTO_OLD_DIVERT: ::c_int = 254;
pub const IPPROTO_MAX: ::c_int = 256;
/// last return value of *_input(), meaning "all job for this pkt is done".
pub const IPPROTO_DONE: ::c_int = 257;
/* Only used internally, so can be outside the range of valid IP protocols. */
/// divert pseudo-protocol
pub const IPPROTO_DIVERT: ::c_int = 258;
/// SeND pseudo-protocol
pub const IPPROTO_SEND: ::c_int = 259;
// sys/netinet/TCP.h
// TCP_* socket options.
pub const TCP_MD5SIG: ::c_int = 16;
pub const TCP_INFO: ::c_int = 32;
pub const TCP_CONGESTION: ::c_int = 64;
pub const TCP_CCALGOOPT: ::c_int = 65;
pub const TCP_KEEPINIT: ::c_int = 128;
pub const TCP_FASTOPEN: ::c_int = 1025;
pub const TCP_PCAP_OUT: ::c_int = 2048;
pub const TCP_PCAP_IN: ::c_int = 4096;
// IP_* / IPV6_* socket options.
pub const IP_BINDANY: ::c_int = 24;
pub const IP_BINDMULTI: ::c_int = 25;
pub const IP_RSS_LISTEN_BUCKET: ::c_int = 26;
pub const IP_ORIGDSTADDR : ::c_int = 27;
pub const IP_RECVORIGDSTADDR : ::c_int = IP_ORIGDSTADDR;
pub const IP_RECVTOS: ::c_int = 68;
pub const IPV6_ORIGDSTADDR: ::c_int = 72;
pub const IPV6_RECVORIGDSTADDR: ::c_int = IPV6_ORIGDSTADDR;
// Protocol families mirror the address families above.
pub const PF_SLOW: ::c_int = AF_SLOW;
pub const PF_SCLUSTER: ::c_int = AF_SCLUSTER;
pub const PF_ARP: ::c_int = AF_ARP;
pub const PF_BLUETOOTH: ::c_int = AF_BLUETOOTH;
pub const PF_IEEE80211: ::c_int = AF_IEEE80211;
pub const PF_INET_SDP: ::c_int = AF_INET_SDP;
pub const PF_INET6_SDP: ::c_int = AF_INET6_SDP;
#[doc(hidden)]
#[deprecated(
    since = "0.2.55",
    note = "If you are using this report to: \
            https://github.com/rust-lang/libc/issues/665"
)]
#[allow(deprecated)]
pub const PF_MAX: ::c_int = AF_MAX;
// NET_RT_* sysctl routing-table report types.
pub const NET_RT_DUMP: ::c_int = 1;
pub const NET_RT_FLAGS: ::c_int = 2;
pub const NET_RT_IFLIST: ::c_int = 3;
pub const NET_RT_IFMALIST: ::c_int = 4;
pub const NET_RT_IFLISTL: ::c_int = 5;
// System V IPC
// Key/flag values shared by msgget(2), semget(2), and shmget(2), plus
// permission bits and shmat(2)/msgrcv(2) modifiers.
pub const IPC_PRIVATE: ::key_t = 0;
pub const IPC_CREAT: ::c_int = 0o1000;
pub const IPC_EXCL: ::c_int = 0o2000;
pub const IPC_NOWAIT: ::c_int = 0o4000;
// *ctl(2) command values.
pub const IPC_RMID: ::c_int = 0;
pub const IPC_SET: ::c_int = 1;
pub const IPC_STAT: ::c_int = 2;
pub const IPC_INFO: ::c_int = 3;
// Owner read/write permission bits.
pub const IPC_R : ::c_int = 0o400;
pub const IPC_W : ::c_int = 0o200;
pub const IPC_M : ::c_int = 0o10000;
pub const MSG_NOERROR: ::c_int = 0o10000;
pub const SHM_RDONLY: ::c_int = 0o10000;
pub const SHM_RND: ::c_int = 0o20000;
pub const SHM_R: ::c_int = 0o400;
pub const SHM_W: ::c_int = 0o200;
// shmctl(2) command values.
pub const SHM_LOCK: ::c_int = 11;
pub const SHM_UNLOCK: ::c_int = 12;
pub const SHM_STAT: ::c_int = 13;
pub const SHM_INFO: ::c_int = 14;
// Sentinel address: request anonymous shared memory from shm_open-like APIs.
pub const SHM_ANON: *mut ::c_char = 1 as *mut ::c_char;
// The *_MAXID constants never should've been used outside of the
// FreeBSD base system. And with the exception of CTL_P1003_1B_MAXID,
// they were all removed in svn r262489. They remain here for backwards
// compatibility only, and are scheduled to be removed in libc 1.0.0.
#[doc(hidden)]
#[deprecated(since="0.2.54",note="Removed in FreeBSD 11")]
#[allow(deprecated)]
pub const NET_MAXID: ::c_int = AF_MAX;
#[doc(hidden)]
#[deprecated(since="0.2.54",note="Removed in FreeBSD 11")]
pub const CTL_MAXID: ::c_int = 10;
#[doc(hidden)]
#[deprecated(since="0.2.54",note="Removed in FreeBSD 11")]
pub const KERN_MAXID: ::c_int = 38;
#[doc(hidden)]
#[deprecated(since="0.2.54",note="Removed in FreeBSD 11")]
pub const HW_MAXID: ::c_int = 13;
#[doc(hidden)]
#[deprecated(since="0.2.54",note="Removed in FreeBSD 11")]
pub const USER_MAXID: ::c_int = 21;
#[doc(hidden)]
pub const CTL_P1003_1B_MAXID: ::c_int = 26;
// Additional MSG_* flags for send(2)/recv(2).
pub const MSG_NOTIFICATION: ::c_int = 0x00002000;
pub const MSG_NBIO: ::c_int = 0x00004000;
pub const MSG_COMPAT: ::c_int = 0x00008000;
pub const MSG_CMSG_CLOEXEC: ::c_int = 0x00040000;
pub const MSG_NOSIGNAL: ::c_int = 0x20000;
// utmpx ut_type record kinds (see getutxent(3)).
pub const EMPTY: ::c_short = 0;
pub const BOOT_TIME: ::c_short = 1;
pub const OLD_TIME: ::c_short = 2;
pub const NEW_TIME: ::c_short = 3;
pub const USER_PROCESS: ::c_short = 4;
pub const INIT_PROCESS: ::c_short = 5;
pub const LOGIN_PROCESS: ::c_short = 6;
pub const DEAD_PROCESS: ::c_short = 7;
pub const SHUTDOWN_TIME: ::c_short = 8;
// Locale category masks for newlocale(3)/querylocale(3).
pub const LC_COLLATE_MASK: ::c_int = (1 << 0);
pub const LC_CTYPE_MASK: ::c_int = (1 << 1);
pub const LC_MONETARY_MASK: ::c_int =(1 << 2);
pub const LC_NUMERIC_MASK: ::c_int = (1 << 3);
pub const LC_TIME_MASK: ::c_int = (1 << 4);
pub const LC_MESSAGES_MASK: ::c_int = (1 << 5);
// Union of all of the category masks above.
pub const LC_ALL_MASK: ::c_int = LC_COLLATE_MASK
    | LC_CTYPE_MASK
    | LC_MESSAGES_MASK
    | LC_MONETARY_MASK
    | LC_NUMERIC_MASK
    | LC_TIME_MASK;
// Options for waitpid(2)/waitid(2).
pub const WSTOPPED: ::c_int = 2; // same as WUNTRACED
pub const WCONTINUED: ::c_int = 4;
pub const WNOWAIT: ::c_int = 8;
pub const WEXITED: ::c_int = 16;
pub const WTRAPPED: ::c_int = 32;
// FreeBSD defines a great many more of these, we only expose the
// standardized ones.
pub const P_PID: idtype_t = 0;
pub const P_PGID: idtype_t = 2;
pub const P_ALL: idtype_t = 7;
// High terminal baud rates beyond the shared set.
pub const B460800: ::speed_t = 460800;
pub const B921600: ::speed_t = 921600;
// *at(2) syscall flags and the "current directory" fd sentinel.
pub const AT_FDCWD: ::c_int = -100;
pub const AT_EACCESS: ::c_int = 0x100;
pub const AT_SYMLINK_NOFOLLOW: ::c_int = 0x200;
pub const AT_SYMLINK_FOLLOW: ::c_int = 0x400;
pub const AT_REMOVEDIR: ::c_int = 0x800;
// termios output tab-delay selection.
pub const TABDLY: ::tcflag_t = 0x00000004;
pub const TAB0: ::tcflag_t = 0x00000000;
pub const TAB3: ::tcflag_t = 0x00000004;
// pathconf(2)/sysconf(3) extension names.
pub const _PC_ACL_NFS4: ::c_int = 64;
pub const _SC_CPUSET_SIZE: ::c_int = 122;
// xucred credential-structure limits (see unix(4) LOCAL_PEERCRED).
pub const XU_NGROUPS: ::c_int = 16;
pub const XUCRED_VERSION: ::c_uint = 0;
// Flags which can be passed to pdfork(2)
pub const PD_DAEMON: ::c_int = 0x00000001;
pub const PD_CLOEXEC: ::c_int = 0x00000002;
pub const PD_ALLOWED_AT_FORK: ::c_int = PD_DAEMON | PD_CLOEXEC;
// Values for struct rtprio (type_ field)
pub const RTP_PRIO_REALTIME: ::c_ushort = 2;
pub const RTP_PRIO_NORMAL: ::c_ushort = 3;
pub const RTP_PRIO_IDLE: ::c_ushort = 4;
// posix_spawnattr_setflags(3) flag bits.
pub const POSIX_SPAWN_RESETIDS: ::c_int = 0x01;
pub const POSIX_SPAWN_SETPGROUP: ::c_int = 0x02;
pub const POSIX_SPAWN_SETSCHEDPARAM: ::c_int = 0x04;
pub const POSIX_SPAWN_SETSCHEDULER: ::c_int = 0x08;
pub const POSIX_SPAWN_SETSIGDEF: ::c_int = 0x10;
pub const POSIX_SPAWN_SETSIGMASK: ::c_int = 0x20;
// Flags for chflags(2)
// UF_* are user-settable, SF_* are superuser-only.
pub const UF_SYSTEM: ::c_ulong = 0x00000080;
pub const UF_SPARSE: ::c_ulong = 0x00000100;
pub const UF_OFFLINE: ::c_ulong = 0x00000200;
pub const UF_REPARSE: ::c_ulong = 0x00000400;
pub const UF_ARCHIVE: ::c_ulong = 0x00000800;
pub const UF_READONLY: ::c_ulong = 0x00001000;
pub const UF_HIDDEN: ::c_ulong = 0x00008000;
pub const SF_SNAPSHOT: ::c_ulong = 0x00200000;
fn _ALIGN(p: usize) -> usize {
(p + _ALIGNBYTES) & !_ALIGNBYTES
}
f! {
    // Pointer to the data payload that follows a cmsghdr, skipping the
    // aligned header.  (`//` comments only in this block: doc attributes
    // would not match the `f!` macro's pattern.)
    pub fn CMSG_DATA(cmsg: *const ::cmsghdr) -> *mut ::c_uchar {
        (cmsg as *mut ::c_uchar)
            .offset(_ALIGN(::mem::size_of::<::cmsghdr>()) as isize)
    }
    // Length of a control message: aligned header plus `length` data bytes.
    pub fn CMSG_LEN(length: ::c_uint) -> ::c_uint {
        _ALIGN(::mem::size_of::<::cmsghdr>()) as ::c_uint + length
    }
    // Next control message in `mhdr` after `cmsg`; the first one when
    // `cmsg` is null; a null pointer once the buffer is exhausted.
    pub fn CMSG_NXTHDR(mhdr: *const ::msghdr, cmsg: *const ::cmsghdr)
        -> *mut ::cmsghdr
    {
        if cmsg.is_null() {
            return ::CMSG_FIRSTHDR(mhdr);
        };
        // NOTE(review): the bound check reserves room for a following
        // aligned header while the returned pointer does not add it —
        // confirm this matches the kernel's CMSG_NXTHDR before changing.
        let next = cmsg as usize + _ALIGN((*cmsg).cmsg_len as usize)
            + _ALIGN(::mem::size_of::<::cmsghdr>());
        let max = (*mhdr).msg_control as usize
            + (*mhdr).msg_controllen as usize;
        if next > max {
            0 as *mut ::cmsghdr
        } else {
            (cmsg as usize + _ALIGN((*cmsg).cmsg_len as usize))
                as *mut ::cmsghdr
        }
    }
    // Buffer space consumed by a control message with `length` data bytes,
    // including alignment padding; use to size msg_control.
    pub fn CMSG_SPACE(length: ::c_uint) -> ::c_uint {
        (_ALIGN(::mem::size_of::<::cmsghdr>()) + _ALIGN(length as usize))
            as ::c_uint
    }
    // uname(3) is implemented via __xuname on FreeBSD; 256 is the
    // per-field buffer size passed through.
    pub fn uname(buf: *mut ::utsname) -> ::c_int {
        __xuname(256, buf as *mut ::c_void)
    }
}
extern {
    // Per-thread errno location.
    pub fn __error() -> *mut ::c_int;
    // POSIX clock access.
    pub fn clock_getres(clk_id: ::clockid_t, tp: *mut ::timespec) -> ::c_int;
    pub fn clock_gettime(clk_id: ::clockid_t, tp: *mut ::timespec) -> ::c_int;
    pub fn clock_settime(clk_id: ::clockid_t, tp: *const ::timespec) -> ::c_int;
    // extattr(2) family: extended attributes on fds, paths, and symlinks.
    pub fn extattr_delete_fd(fd: ::c_int,
                             attrnamespace: ::c_int,
                             attrname: *const ::c_char) -> ::c_int;
    pub fn extattr_delete_file(path: *const ::c_char,
                               attrnamespace: ::c_int,
                               attrname: *const ::c_char) -> ::c_int;
    pub fn extattr_delete_link(path: *const ::c_char,
                               attrnamespace: ::c_int,
                               attrname: *const ::c_char) -> ::c_int;
    pub fn extattr_get_fd(fd: ::c_int,
                          attrnamespace: ::c_int,
                          attrname: *const ::c_char,
                          data: *mut ::c_void,
                          nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_get_file(path: *const ::c_char,
                            attrnamespace: ::c_int,
                            attrname: *const ::c_char,
                            data: *mut ::c_void,
                            nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_get_link(path: *const ::c_char,
                            attrnamespace: ::c_int,
                            attrname: *const ::c_char,
                            data: *mut ::c_void,
                            nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_list_fd(fd: ::c_int,
                           attrnamespace: ::c_int,
                           data: *mut ::c_void,
                           nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_list_file(path: *const ::c_char,
                             attrnamespace: ::c_int,
                             data: *mut ::c_void,
                             nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_list_link(path: *const ::c_char,
                             attrnamespace: ::c_int,
                             data: *mut ::c_void,
                             nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_set_fd(fd: ::c_int,
                          attrnamespace: ::c_int,
                          attrname: *const ::c_char,
                          data: *const ::c_void,
                          nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_set_file(path: *const ::c_char,
                            attrnamespace: ::c_int,
                            attrname: *const ::c_char,
                            data: *const ::c_void,
                            nbytes: ::size_t) -> ::ssize_t;
    pub fn extattr_set_link(path: *const ::c_char,
                            attrnamespace: ::c_int,
                            attrname: *const ::c_char,
                            data: *const ::c_void,
                            nbytes: ::size_t) -> ::ssize_t;
    // jail(2) management.
    pub fn jail(jail: *mut ::jail) -> ::c_int;
    pub fn jail_attach(jid: ::c_int) -> ::c_int;
    pub fn jail_remove(jid: ::c_int) -> ::c_int;
    pub fn jail_get(iov: *mut ::iovec, niov: ::c_uint, flags: ::c_int)
                    -> ::c_int;
    pub fn jail_set(iov: *mut ::iovec, niov: ::c_uint, flags: ::c_int)
                    -> ::c_int;
    // File and memory hints.
    pub fn fdatasync(fd: ::c_int) -> ::c_int;
    pub fn posix_fallocate(fd: ::c_int, offset: ::off_t,
                           len: ::off_t) -> ::c_int;
    pub fn posix_fadvise(fd: ::c_int, offset: ::off_t, len: ::off_t,
                         advise: ::c_int) -> ::c_int;
    // Temporary files with caller-supplied open(2) flags.
    pub fn mkostemp(template: *mut ::c_char, flags: ::c_int) -> ::c_int;
    pub fn mkostemps(template: *mut ::c_char,
                     suffixlen: ::c_int,
                     flags: ::c_int) -> ::c_int;
    // utmpx database access.
    pub fn getutxuser(user: *const ::c_char) -> *mut utmpx;
    pub fn setutxdb(_type: ::c_int, file: *const ::c_char) -> ::c_int;
    // Async I/O completion and POSIX message queues.
    pub fn aio_waitcomplete(iocbp: *mut *mut aiocb,
                            timeout: *mut ::timespec) -> ::ssize_t;
    pub fn mq_getfd_np(mqd: ::mqd_t) -> ::c_int;
    pub fn waitid(idtype: idtype_t, id: ::id_t, infop: *mut ::siginfo_t,
                  options: ::c_int) -> ::c_int;
    // System V IPC: shared memory and message queues.
    pub fn ftok(pathname: *const ::c_char, proj_id: ::c_int) -> ::key_t;
    pub fn shmget(key: ::key_t, size: ::size_t, shmflg: ::c_int) -> ::c_int;
    pub fn shmat(shmid: ::c_int, shmaddr: *const ::c_void,
                 shmflg: ::c_int) -> *mut ::c_void;
    pub fn shmdt(shmaddr: *const ::c_void) -> ::c_int;
    pub fn shmctl(shmid: ::c_int, cmd: ::c_int,
                  buf: *mut ::shmid_ds) -> ::c_int;
    pub fn msgctl(msqid: ::c_int, cmd: ::c_int,
                  buf: *mut ::msqid_ds) -> ::c_int;
    pub fn msgget(key: ::key_t, msgflg: ::c_int) -> ::c_int;
    pub fn msgsnd(msqid: ::c_int, msgp: *const ::c_void, msgsz: ::size_t,
                  msgflg: ::c_int) -> ::c_int;
    pub fn cfmakesane(termios: *mut ::termios);
    pub fn fexecve(fd: ::c_int, argv: *const *const ::c_char,
                   envp: *const *const ::c_char)
                   -> ::c_int;
    // Process descriptors (pdfork(2) family).
    pub fn pdfork(fdp: *mut ::c_int, flags: ::c_int) -> ::pid_t;
    pub fn pdgetpid(fd: ::c_int, pidp: *mut ::pid_t) -> ::c_int;
    pub fn pdkill(fd: ::c_int, signum: ::c_int) -> ::c_int;
    pub fn rtprio_thread(function: ::c_int, lwpid: ::lwpid_t,
                         rtp: *mut super::rtprio) -> ::c_int;
    // posix_spawn(3) family: spawning, attributes, and file actions.
    pub fn posix_spawn(pid: *mut ::pid_t,
                       path: *const ::c_char,
                       file_actions: *const ::posix_spawn_file_actions_t,
                       attrp: *const ::posix_spawnattr_t,
                       argv: *const *mut ::c_char,
                       envp: *const *mut ::c_char) -> ::c_int;
    pub fn posix_spawnp(pid: *mut ::pid_t,
                        file: *const ::c_char,
                        file_actions: *const ::posix_spawn_file_actions_t,
                        attrp: *const ::posix_spawnattr_t,
                        argv: *const *mut ::c_char,
                        envp: *const *mut ::c_char) -> ::c_int;
    pub fn posix_spawnattr_init(attr: *mut posix_spawnattr_t) -> ::c_int;
    pub fn posix_spawnattr_destroy(attr: *mut posix_spawnattr_t) -> ::c_int;
    pub fn posix_spawnattr_getsigdefault(attr: *const posix_spawnattr_t,
                                         default: *mut ::sigset_t) -> ::c_int;
    pub fn posix_spawnattr_setsigdefault(attr: *mut posix_spawnattr_t,
                                         default: *const ::sigset_t) -> ::c_int;
    pub fn posix_spawnattr_getsigmask(attr: *const posix_spawnattr_t,
                                      default: *mut ::sigset_t) -> ::c_int;
    pub fn posix_spawnattr_setsigmask(attr: *mut posix_spawnattr_t,
                                      default: *const ::sigset_t) -> ::c_int;
    pub fn posix_spawnattr_getflags(attr: *const posix_spawnattr_t,
                                    flags: *mut ::c_short) -> ::c_int;
    pub fn posix_spawnattr_setflags(attr: *mut posix_spawnattr_t,
                                    flags: ::c_short) -> ::c_int;
    pub fn posix_spawnattr_getpgroup(attr: *const posix_spawnattr_t,
                                     flags: *mut ::pid_t) -> ::c_int;
    pub fn posix_spawnattr_setpgroup(attr: *mut posix_spawnattr_t,
                                     flags: ::pid_t) -> ::c_int;
    pub fn posix_spawnattr_getschedpolicy(attr: *const posix_spawnattr_t,
                                          flags: *mut ::c_int) -> ::c_int;
    pub fn posix_spawnattr_setschedpolicy(attr: *mut posix_spawnattr_t,
                                          flags: ::c_int) -> ::c_int;
    pub fn posix_spawnattr_getschedparam(
        attr: *const posix_spawnattr_t,
        param: *mut ::sched_param,
    ) -> ::c_int;
    pub fn posix_spawnattr_setschedparam(
        attr: *mut posix_spawnattr_t,
        param: *const ::sched_param,
    ) -> ::c_int;
    pub fn posix_spawn_file_actions_init(
        actions: *mut posix_spawn_file_actions_t,
    ) -> ::c_int;
    pub fn posix_spawn_file_actions_destroy(
        actions: *mut posix_spawn_file_actions_t,
    ) -> ::c_int;
    pub fn posix_spawn_file_actions_addopen(
        actions: *mut posix_spawn_file_actions_t,
        fd: ::c_int,
        path: *const ::c_char,
        oflag: ::c_int,
        mode: ::mode_t,
    ) -> ::c_int;
    pub fn posix_spawn_file_actions_addclose(
        actions: *mut posix_spawn_file_actions_t,
        fd: ::c_int,
    ) -> ::c_int;
    pub fn posix_spawn_file_actions_adddup2(
        actions: *mut posix_spawn_file_actions_t,
        fd: ::c_int,
        newfd: ::c_int,
    ) -> ::c_int;
    // Filesystem statistics and misc.
    pub fn statfs(path: *const ::c_char, buf: *mut statfs) -> ::c_int;
    pub fn fstatfs(fd: ::c_int, buf: *mut statfs) -> ::c_int;
    pub fn dup3(src: ::c_int, dst: ::c_int, flags: ::c_int) -> ::c_int;
    // Backing syscall for uname(3); see the `f!` wrapper above.
    pub fn __xuname(nmln: ::c_int, buf: *mut ::c_void) -> ::c_int;
    // Batched message send/receive.
    pub fn sendmmsg(sockfd: ::c_int, msgvec: *mut ::mmsghdr, vlen: ::size_t,
                    flags: ::c_int) -> ::ssize_t;
    pub fn recvmmsg(sockfd: ::c_int, msgvec: *mut ::mmsghdr, vlen: ::size_t,
                    flags: ::c_int, timeout: *const ::timespec) -> ::ssize_t;
}
// These two helpers live in libutil rather than libc.
#[link(name = "util")]
extern {
    pub fn extattr_namespace_to_string(attrnamespace: ::c_int,
                                       string: *mut *mut ::c_char) -> ::c_int;
    pub fn extattr_string_to_namespace(string: *const ::c_char,
                                       attrnamespace: *mut ::c_int) -> ::c_int;
}
// Select OS-version-specific definitions; the `freebsd12` cfg falls back
// to the FreeBSD 11 module when not set.
cfg_if! {
    if #[cfg(freebsd12)] {
        mod freebsd12;
        pub use self::freebsd12::*;
    } else {
        mod freebsd11;
        pub use self::freebsd11::*;
    }
}
// Pull in architecture-specific definitions; unknown targets get none.
cfg_if! {
    if #[cfg(target_arch = "x86")] {
        mod x86;
        pub use self::x86::*;
    } else if #[cfg(target_arch = "x86_64")] {
        mod x86_64;
        pub use self::x86_64::*;
    } else if #[cfg(target_arch = "aarch64")] {
        mod aarch64;
        pub use self::aarch64::*;
    } else if #[cfg(target_arch = "arm")] {
        mod arm;
        pub use self::arm::*;
    } else if #[cfg(target_arch = "powerpc64")] {
        mod powerpc64;
        pub use self::powerpc64::*;
    } else {
        // Unknown target_arch
    }
}
| 36.576583 | 80 | 0.626965 |
4a4f65ae98fed471b12db7608cbef10ca3be7b83 | 13,077 | // ======================================
// This file was automatically generated.
// ======================================
use serde_derive::{Deserialize, Serialize};
use crate::config::{Client, Response};
use crate::ids::TaxRateId;
use crate::params::{Expand, List, Metadata, Object, RangeQuery, Timestamp};
/// The resource representing a Stripe "TaxRate".
///
/// For more details see <https://stripe.com/docs/api/tax_rates/object>
// Optional fields are skipped during serialization when `None`;
// field order is preserved to keep serde output stable.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct TaxRate {
    /// Unique identifier for the object.
    pub id: TaxRateId,

    /// Defaults to `true`.
    ///
    /// When set to `false`, this tax rate cannot be used with new applications or Checkout Sessions, but will still work for subscriptions and invoices that already have it set.
    pub active: bool,

    /// Two-letter country code ([ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2)).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub country: Option<String>,

    /// Time at which the object was created.
    ///
    /// Measured in seconds since the Unix epoch.
    pub created: Timestamp,

    /// An arbitrary string attached to the tax rate for your internal use only.
    ///
    /// It will not be visible to your customers.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,

    /// The display name of the tax rates as it will appear to your customer on their receipt email, PDF, and the hosted invoice page.
    pub display_name: String,

    /// This specifies if the tax rate is inclusive or exclusive.
    pub inclusive: bool,

    /// The jurisdiction for the tax rate.
    ///
    /// You can use this label field for tax reporting purposes.
    /// It also appears on your customer’s invoice.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub jurisdiction: Option<String>,

    /// Has the value `true` if the object exists in live mode or the value `false` if the object exists in test mode.
    pub livemode: bool,

    /// Set of [key-value pairs](https://stripe.com/docs/api/metadata) that you can attach to an object.
    ///
    /// This can be useful for storing additional information about the object in a structured format.
    // `default` (not skip) so deserialization tolerates a missing map.
    #[serde(default)]
    pub metadata: Metadata,

    /// This represents the tax rate percent out of 100.
    pub percentage: f64,

    /// [ISO 3166-2 subdivision code](https://en.wikipedia.org/wiki/ISO_3166-2:US), without country prefix.
    ///
    /// For example, "NY" for New York, United States.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,

    /// The high-level tax type, such as `vat` or `sales_tax`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tax_type: Option<TaxRateTaxType>,
}
impl TaxRate {
    /// Returns a list of your tax rates.
    ///
    /// Tax rates are returned sorted by creation date, with the most recently created tax rates appearing first.
    pub fn list(client: &Client, params: ListTaxRates<'_>) -> Response<List<TaxRate>> {
        // Fixed mojibake: `&params` had been corrupted to `¶ms`
        // (an HTML-entity rendering of `&para`), which does not compile.
        client.get_query("/tax_rates", &params)
    }

    /// Creates a new tax rate.
    pub fn create(client: &Client, params: CreateTaxRate<'_>) -> Response<TaxRate> {
        client.post_form("/tax_rates", &params)
    }

    /// Retrieves a tax rate with the given ID.
    pub fn retrieve(client: &Client, id: &TaxRateId, expand: &[&str]) -> Response<TaxRate> {
        client.get_query(&format!("/tax_rates/{}", id), &Expand { expand })
    }

    /// Updates an existing tax rate.
    pub fn update(client: &Client, id: &TaxRateId, params: UpdateTaxRate<'_>) -> Response<TaxRate> {
        client.post_form(&format!("/tax_rates/{}", id), &params)
    }
}
impl Object for TaxRate {
    type Id = TaxRateId;
    // Clones the identifier because the trait returns an owned `Id`.
    fn id(&self) -> Self::Id {
        self.id.clone()
    }
    // The `object` discriminator string Stripe uses for this resource.
    fn object(&self) -> &'static str {
        "tax_rate"
    }
}
/// The parameters for `TaxRate::create`.
///
/// `display_name`, `inclusive`, and `percentage` are always serialized; all
/// other fields are omitted from the request when unset.
#[derive(Clone, Debug, Serialize)]
pub struct CreateTaxRate<'a> {
    /// Flag determining whether the tax rate is active or inactive (archived).
    ///
    /// Inactive tax rates cannot be used with new applications or Checkout Sessions, but will still work for subscriptions and invoices that already have it set.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub active: Option<bool>,

    /// Two-letter country code ([ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2)).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub country: Option<&'a str>,

    /// An arbitrary string attached to the tax rate for your internal use only.
    ///
    /// It will not be visible to your customers.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<&'a str>,

    /// The display name of the tax rate, which will be shown to users.
    ///
    /// Required.
    pub display_name: &'a str,

    /// Specifies which fields in the response should be expanded.
    #[serde(skip_serializing_if = "Expand::is_empty")]
    pub expand: &'a [&'a str],

    /// This specifies if the tax rate is inclusive or exclusive.
    ///
    /// Required.
    pub inclusive: bool,

    /// The jurisdiction for the tax rate.
    ///
    /// You can use this label field for tax reporting purposes.
    /// It also appears on your customer’s invoice.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub jurisdiction: Option<&'a str>,

    /// Set of [key-value pairs](https://stripe.com/docs/api/metadata) that you can attach to an object.
    ///
    /// This can be useful for storing additional information about the object in a structured format.
    /// Individual keys can be unset by posting an empty value to them.
    /// All keys can be unset by posting an empty value to `metadata`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,

    /// This represents the tax rate percent out of 100.
    ///
    /// Required.
    pub percentage: f64,

    /// [ISO 3166-2 subdivision code](https://en.wikipedia.org/wiki/ISO_3166-2:US), without country prefix.
    ///
    /// For example, "NY" for New York, United States.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<&'a str>,

    /// The high-level tax type, such as `vat` or `sales_tax`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tax_type: Option<TaxRateTaxType>,
}
impl<'a> CreateTaxRate<'a> {
    /// Builds a `CreateTaxRate` with the two required fields set and every
    /// optional field left empty (`None` / empty slice / `false`).
    pub fn new(display_name: &'a str, percentage: f64) -> Self {
        CreateTaxRate {
            active: None,
            country: None,
            description: None,
            display_name,
            expand: &[],
            inclusive: false,
            jurisdiction: None,
            metadata: None,
            percentage,
            state: None,
            tax_type: None,
        }
    }
}
/// The parameters for `TaxRate::list`.
///
/// Every field is optional; `ListTaxRates::new()` (or `default()`) produces
/// an unfiltered list request.
#[derive(Clone, Debug, Serialize, Default)]
pub struct ListTaxRates<'a> {
    /// Optional flag to filter by tax rates that are either active or inactive (archived).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub active: Option<bool>,

    /// Optional range for filtering created date.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub created: Option<RangeQuery<Timestamp>>,

    /// A cursor for use in pagination.
    ///
    /// `ending_before` is an object ID that defines your place in the list.
    /// For instance, if you make a list request and receive 100 objects, starting with `obj_bar`, your subsequent call can include `ending_before=obj_bar` in order to fetch the previous page of the list.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ending_before: Option<TaxRateId>,

    /// Specifies which fields in the response should be expanded.
    #[serde(skip_serializing_if = "Expand::is_empty")]
    pub expand: &'a [&'a str],

    /// Optional flag to filter by tax rates that are inclusive (or those that are not inclusive).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inclusive: Option<bool>,

    /// A limit on the number of objects to be returned.
    ///
    /// Limit can range between 1 and 100, and the default is 10.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub limit: Option<u64>,

    /// A cursor for use in pagination.
    ///
    /// `starting_after` is an object ID that defines your place in the list.
    /// For instance, if you make a list request and receive 100 objects, ending with `obj_foo`, your subsequent call can include `starting_after=obj_foo` in order to fetch the next page of the list.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub starting_after: Option<TaxRateId>,
}
impl<'a> ListTaxRates<'a> {
pub fn new() -> Self {
ListTaxRates {
active: Default::default(),
created: Default::default(),
ending_before: Default::default(),
expand: Default::default(),
inclusive: Default::default(),
limit: Default::default(),
starting_after: Default::default(),
}
}
}
/// The parameters for `TaxRate::update`.
///
/// Every field is optional; only the fields that are set are serialized into
/// the update request, leaving the rest of the tax rate unchanged.
#[derive(Clone, Debug, Serialize, Default)]
pub struct UpdateTaxRate<'a> {
    /// Flag determining whether the tax rate is active or inactive (archived).
    ///
    /// Inactive tax rates cannot be used with new applications or Checkout Sessions, but will still work for subscriptions and invoices that already have it set.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub active: Option<bool>,

    /// Two-letter country code ([ISO 3166-1 alpha-2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2)).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub country: Option<&'a str>,

    /// An arbitrary string attached to the tax rate for your internal use only.
    ///
    /// It will not be visible to your customers.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<&'a str>,

    /// The display name of the tax rate, which will be shown to users.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub display_name: Option<&'a str>,

    /// Specifies which fields in the response should be expanded.
    #[serde(skip_serializing_if = "Expand::is_empty")]
    pub expand: &'a [&'a str],

    /// The jurisdiction for the tax rate.
    ///
    /// You can use this label field for tax reporting purposes.
    /// It also appears on your customer’s invoice.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub jurisdiction: Option<&'a str>,

    /// Set of [key-value pairs](https://stripe.com/docs/api/metadata) that you can attach to an object.
    ///
    /// This can be useful for storing additional information about the object in a structured format.
    /// Individual keys can be unset by posting an empty value to them.
    /// All keys can be unset by posting an empty value to `metadata`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,

    /// [ISO 3166-2 subdivision code](https://en.wikipedia.org/wiki/ISO_3166-2:US), without country prefix.
    ///
    /// For example, "NY" for New York, United States.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub state: Option<&'a str>,

    /// The high-level tax type, such as `vat` or `sales_tax`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tax_type: Option<TaxRateTaxType>,
}
impl<'a> UpdateTaxRate<'a> {
pub fn new() -> Self {
UpdateTaxRate {
active: Default::default(),
country: Default::default(),
description: Default::default(),
display_name: Default::default(),
expand: Default::default(),
jurisdiction: Default::default(),
metadata: Default::default(),
state: Default::default(),
tax_type: Default::default(),
}
}
}
/// An enum representing the possible values of an `TaxRate`'s `tax_type` field.
///
/// Variants serialize to their snake_case names (e.g. `SalesTax` →
/// `"sales_tax"`), matching the strings returned by `TaxRateTaxType::as_str`.
#[derive(Copy, Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum TaxRateTaxType {
    Gst,
    Hst,
    Jct,
    Pst,
    Qst,
    Rst,
    SalesTax,
    Vat,
}
impl TaxRateTaxType {
pub fn as_str(self) -> &'static str {
match self {
TaxRateTaxType::Gst => "gst",
TaxRateTaxType::Hst => "hst",
TaxRateTaxType::Jct => "jct",
TaxRateTaxType::Pst => "pst",
TaxRateTaxType::Qst => "qst",
TaxRateTaxType::Rst => "rst",
TaxRateTaxType::SalesTax => "sales_tax",
TaxRateTaxType::Vat => "vat",
}
}
}
impl AsRef<str> for TaxRateTaxType {
fn as_ref(&self) -> &str {
self.as_str()
}
}
impl std::fmt::Display for TaxRateTaxType {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // Delegate to `str`'s `Display` impl through the same formatter so
        // caller-supplied flags (width, fill, alignment) still apply.
        std::fmt::Display::fmt(self.as_str(), f)
    }
}
impl std::default::Default for TaxRateTaxType {
fn default() -> Self {
Self::Gst
}
}
//! Mono Item Collection
//! ====================
//!
//! This module is responsible for discovering all items that will contribute
//! to code generation of the crate. The important part here is that it not only
//! needs to find syntax-level items (functions, structs, etc) but also all
//! their monomorphized instantiations. Every non-generic, non-const function
//! maps to one LLVM artifact. Every generic function can produce
//! from zero to N artifacts, depending on the sets of type arguments it
//! is instantiated with.
//! This also applies to generic items from other crates: A generic definition
//! in crate X might produce monomorphizations that are compiled into crate Y.
//! We also have to collect these here.
//!
//! The following kinds of "mono items" are handled here:
//!
//! - Functions
//! - Methods
//! - Closures
//! - Statics
//! - Drop glue
//!
//! The following things also result in LLVM artifacts, but are not collected
//! here, since we instantiate them locally on demand when needed in a given
//! codegen unit:
//!
//! - Constants
//! - Vtables
//! - Object Shims
//!
//!
//! General Algorithm
//! -----------------
//! Let's define some terms first:
//!
//! - A "mono item" is something that results in a function or global in
//! the LLVM IR of a codegen unit. Mono items do not stand on their
//! own, they can reference other mono items. For example, if function
//! `foo()` calls function `bar()` then the mono item for `foo()`
//! references the mono item for function `bar()`. In general, the
//! definition for mono item A referencing a mono item B is that
//! the LLVM artifact produced for A references the LLVM artifact produced
//! for B.
//!
//! - Mono items and the references between them form a directed graph,
//! where the mono items are the nodes and references form the edges.
//! Let's call this graph the "mono item graph".
//!
//! - The mono item graph for a program contains all mono items
//! that are needed in order to produce the complete LLVM IR of the program.
//!
//! The purpose of the algorithm implemented in this module is to build the
//! mono item graph for the current crate. It runs in two phases:
//!
//! 1. Discover the roots of the graph by traversing the HIR of the crate.
//! 2. Starting from the roots, find neighboring nodes by inspecting the MIR
//! representation of the item corresponding to a given node, until no more
//! new nodes are found.
//!
//! ### Discovering roots
//!
//! The roots of the mono item graph correspond to the non-generic
//! syntactic items in the source code. We find them by walking the HIR of the
//! crate, and whenever we hit upon a function, method, or static item, we
//! create a mono item consisting of the items DefId and, since we only
//! consider non-generic items, an empty type-substitution set.
//!
//! ### Finding neighbor nodes
//! Given a mono item node, we can discover neighbors by inspecting its
//! MIR. We walk the MIR and any time we hit upon something that signifies a
//! reference to another mono item, we have found a neighbor. Since the
//! mono item we are currently at is always monomorphic, we also know the
//! concrete type arguments of its neighbors, and so all neighbors again will be
//! monomorphic. The specific forms a reference to a neighboring node can take
//! in MIR are quite diverse. Here is an overview:
//!
//! #### Calling Functions/Methods
//! The most obvious form of one mono item referencing another is a
//! function or method call (represented by a CALL terminator in MIR). But
//! calls are not the only thing that might introduce a reference between two
//! function mono items, and as we will see below, they are just a
//! specialization of the form described next, and consequently will not get any
//! special treatment in the algorithm.
//!
//! #### Taking a reference to a function or method
//! A function does not need to actually be called in order to be a neighbor of
//! another function. It suffices to just take a reference in order to introduce
//! an edge. Consider the following example:
//!
//! ```rust
//! fn print_val<T: Display>(x: T) {
//! println!("{}", x);
//! }
//!
//! fn call_fn(f: &Fn(i32), x: i32) {
//! f(x);
//! }
//!
//! fn main() {
//! let print_i32 = print_val::<i32>;
//! call_fn(&print_i32, 0);
//! }
//! ```
//! The MIR of none of these functions will contain an explicit call to
//! `print_val::<i32>`. Nonetheless, in order to monomorphize this program, we need
//! an instance of this function. Thus, whenever we encounter a function or
//! method in operand position, we treat it as a neighbor of the current
//! mono item. Calls are just a special case of that.
//!
//! #### Closures
//! In a way, closures are a simple case. Since every closure object needs to be
//! constructed somewhere, we can reliably discover them by observing
//! `RValue::Aggregate` expressions with `AggregateKind::Closure`. This is also
//! true for closures inlined from other crates.
//!
//! #### Drop glue
//! Drop glue mono items are introduced by MIR drop-statements. The
//! generated mono item will again have drop-glue item neighbors if the
//! type to be dropped contains nested values that also need to be dropped. It
//! might also have a function item neighbor for the explicit `Drop::drop`
//! implementation of its type.
//!
//! #### Unsizing Casts
//! A subtle way of introducing neighbor edges is by casting to a trait object.
//! Since the resulting fat-pointer contains a reference to a vtable, we need to
//! instantiate all object-safe methods of the trait, as we need to store
//! pointers to these functions even if they never get called anywhere. This can
//! be seen as a special case of taking a function reference.
//!
//! #### Boxes
//! Since `Box` expressions have special compiler support, no explicit calls to
//! `exchange_malloc()` and `box_free()` may show up in MIR, even if the
//! compiler will generate them. We have to observe `Rvalue::Box` expressions
//! and Box-typed drop-statements for that purpose.
//!
//!
//! Interaction with Cross-Crate Inlining
//! -------------------------------------
//! The binary of a crate will not only contain machine code for the items
//! defined in the source code of that crate. It will also contain monomorphic
//! instantiations of any extern generic functions and of functions marked with
//! `#[inline]`.
//! The collection algorithm handles this more or less transparently. If it is
//! about to create a mono item for something with an external `DefId`,
//! it will take a look if the MIR for that item is available, and if so just
//! proceed normally. If the MIR is not available, it assumes that the item is
//! just linked to and no node is created; which is exactly what we want, since
//! no machine code should be generated in the current crate for such an item.
//!
//! Eager and Lazy Collection Mode
//! ------------------------------
//! Mono item collection can be performed in one of two modes:
//!
//! - Lazy mode means that items will only be instantiated when actually
//! referenced. The goal is to produce the least amount of machine code
//! possible.
//!
//! - Eager mode is meant to be used in conjunction with incremental compilation
//! where a stable set of mono items is more important than a minimal
//! one. Thus, eager mode will instantiate drop-glue for every drop-able type
//! in the crate, even if no drop call for that type exists (yet). It will
//! also instantiate default implementations of trait methods, something that
//! otherwise is only done on demand.
//!
//!
//! Open Issues
//! -----------
//! Some things are not yet fully implemented in the current version of this
//! module.
//!
//! ### Const Fns
//! Ideally, no mono item should be generated for const fns unless there
//! is a call to them that cannot be evaluated at compile time. At the moment
//! this is not implemented however: a mono item will be produced
//! regardless of whether it is actually needed or not.
use crate::monomorphize;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::{par_iter, MTLock, MTRef, ParallelIterator};
use rustc_errors::{ErrorReported, FatalError};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId, LOCAL_CRATE};
use rustc_hir::itemlikevisit::ItemLikeVisitor;
use rustc_hir::lang_items::LangItem;
use rustc_index::bit_set::GrowableBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::interpret::{AllocId, ConstValue};
use rustc_middle::mir::interpret::{ErrorHandled, GlobalAlloc, Scalar};
use rustc_middle::mir::mono::{InstantiationMode, MonoItem};
use rustc_middle::mir::visit::Visitor as MirVisitor;
use rustc_middle::mir::{self, Local, Location};
use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCast};
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
use rustc_middle::ty::{self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable};
use rustc_session::config::EntryFnType;
use rustc_span::source_map::{dummy_spanned, respan, Span, Spanned, DUMMY_SP};
use smallvec::SmallVec;
use std::iter;
use std::ops::Range;
use std::path::PathBuf;
/// Strategy used when collecting the roots of the mono item graph
/// (see the "Eager and Lazy Collection Mode" section of the module docs).
#[derive(PartialEq)]
pub enum MonoItemCollectionMode {
    /// Instantiate a stable, larger set of items up front (e.g. drop glue for
    /// every droppable type), as described in the module documentation.
    Eager,
    /// Instantiate items only when they are actually referenced.
    Lazy,
}
/// Maps every mono item to all mono items it references in its
/// body.
pub struct InliningMap<'tcx> {
    // Maps a source mono item to the range of mono items
    // accessed by it.
    // The range selects elements within the `targets` vecs.
    index: FxHashMap<MonoItem<'tcx>, Range<usize>>,
    // Flattened storage of all accessed mono items; the ranges stored in
    // `index` point into this vec.
    targets: Vec<MonoItem<'tcx>>,

    // Contains one bit per mono item in the `targets` field. That bit
    // is true if that mono item needs to be inlined into every CGU.
    inlines: GrowableBitSet<usize>,
}
impl<'tcx> InliningMap<'tcx> {
    /// Creates an empty map.
    fn new() -> InliningMap<'tcx> {
        InliningMap {
            index: FxHashMap::default(),
            targets: Vec::new(),
            // Pre-size the bitset; it grows on demand via `ensure` below.
            inlines: GrowableBitSet::with_capacity(1024),
        }
    }

    /// Records every item accessed by `source`. The `bool` in each pair marks
    /// targets that must be inlined into every CGU that references them.
    /// Each `source` may be recorded at most once (asserted below).
    fn record_accesses(&mut self, source: MonoItem<'tcx>, new_targets: &[(MonoItem<'tcx>, bool)]) {
        let start_index = self.targets.len();
        let new_items_count = new_targets.len();
        // `self.targets.len()` is still `start_index` here, so this is the
        // final length of `targets` after the push loop below.
        let new_items_count_total = new_items_count + self.targets.len();

        self.targets.reserve(new_items_count);
        // Grow the bitset up front so the `insert` calls below are in range.
        self.inlines.ensure(new_items_count_total);

        for (i, (target, inline)) in new_targets.iter().enumerate() {
            self.targets.push(*target);
            if *inline {
                // Bit position mirrors the target's index within `targets`.
                self.inlines.insert(i + start_index);
            }
        }

        let end_index = self.targets.len();
        // Each source must be recorded exactly once.
        assert!(self.index.insert(source, start_index..end_index).is_none());
    }

    // Internally iterate over all items referenced by `source` which will be
    // made available for inlining.
    pub fn with_inlining_candidates<F>(&self, source: MonoItem<'tcx>, mut f: F)
    where
        F: FnMut(MonoItem<'tcx>),
    {
        if let Some(range) = self.index.get(&source) {
            for (i, candidate) in self.targets[range.clone()].iter().enumerate() {
                // Only yield targets whose inline bit was set in
                // `record_accesses`.
                if self.inlines.contains(range.start + i) {
                    f(*candidate);
                }
            }
        }
    }

    // Internally iterate over all items and the things each accesses.
    pub fn iter_accesses<F>(&self, mut f: F)
    where
        F: FnMut(MonoItem<'tcx>, &[MonoItem<'tcx>]),
    {
        for (&accessor, range) in &self.index {
            f(accessor, &self.targets[range.clone()])
        }
    }
}
/// Entry point of the collector: discovers the roots and then walks the mono
/// item graph from them in parallel, returning the full set of reachable mono
/// items together with the `InliningMap` built during the walk.
pub fn collect_crate_mono_items(
    tcx: TyCtxt<'_>,
    mode: MonoItemCollectionMode,
) -> (FxHashSet<MonoItem<'_>>, InliningMap<'_>) {
    let _prof_timer = tcx.prof.generic_activity("monomorphization_collector");

    let roots =
        tcx.sess.time("monomorphization_collector_root_collections", || collect_roots(tcx, mode));

    debug!("building mono item graph, beginning at roots");

    let mut visited = MTLock::new(FxHashSet::default());
    let mut inlining_map = MTLock::new(InliningMap::new());

    {
        // Shadow with `MTRef`s so the parallel closures below can share the
        // locked state across worker threads.
        let visited: MTRef<'_, _> = &mut visited;
        let inlining_map: MTRef<'_, _> = &mut inlining_map;

        tcx.sess.time("monomorphization_collector_graph_walk", || {
            // Walk each root in parallel; recursion-depth bookkeeping is
            // per-root, so each worker gets its own map.
            par_iter(roots).for_each(|root| {
                let mut recursion_depths = DefIdMap::default();
                collect_items_rec(
                    tcx,
                    dummy_spanned(root),
                    visited,
                    &mut recursion_depths,
                    inlining_map,
                );
            });
        });
    }

    (visited.into_inner(), inlining_map.into_inner())
}
// Find all non-generic items by walking the HIR. These items serve as roots to
// start monomorphizing from.
fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionMode) -> Vec<MonoItem<'_>> {
    debug!("collecting roots");
    let mut roots = Vec::new();

    {
        let entry_fn = tcx.entry_fn(LOCAL_CRATE);

        debug!("collect_roots: entry_fn = {:?}", entry_fn);

        let mut visitor = RootCollector { tcx, mode, entry_fn, output: &mut roots };

        tcx.hir().krate().visit_all_item_likes(&mut visitor);

        visitor.push_extra_entry_roots();
    }

    // We can only codegen items that are instantiable - items all of
    // whose predicates hold. Luckily, items that aren't instantiable
    // can't actually be used, so we can just skip codegenning them.
    roots
        .into_iter()
        .map(|spanned_root| spanned_root.node)
        .filter(|item| item.is_instantiable(tcx))
        .collect()
}
// Collect all monomorphized items reachable from `starting_point`.
//
// Marks `starting_point` as visited, gathers its neighbors according to its
// kind, records them in `inlining_map`, and recurses into each neighbor.
// The recursion-depth entry bumped by `check_recursion_limit` is restored on
// the way out so sibling subtrees are counted independently.
fn collect_items_rec<'tcx>(
    tcx: TyCtxt<'tcx>,
    starting_point: Spanned<MonoItem<'tcx>>,
    visited: MTRef<'_, MTLock<FxHashSet<MonoItem<'tcx>>>>,
    recursion_depths: &mut DefIdMap<usize>,
    inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>,
) {
    if !visited.lock_mut().insert(starting_point.node) {
        // We've been here already, no need to search again.
        return;
    }
    debug!("BEGIN collect_items_rec({})", starting_point.node);

    let mut neighbors = Vec::new();
    let recursion_depth_reset;

    match starting_point.node {
        MonoItem::Static(def_id) => {
            let instance = Instance::mono(tcx, def_id);

            // Sanity check whether this ended up being collected accidentally
            debug_assert!(should_codegen_locally(tcx, &instance));

            // Dropping the static's value at shutdown needs its drop glue.
            let ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
            visit_drop_use(tcx, ty, true, starting_point.span, &mut neighbors);

            recursion_depth_reset = None;

            // The evaluated initializer may reference other statics/functions
            // through relocations in its allocation; collect those too.
            if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
                for &((), id) in alloc.relocations().values() {
                    collect_miri(tcx, id, &mut neighbors);
                }
            }
        }
        MonoItem::Fn(instance) => {
            // Sanity check whether this ended up being collected accidentally
            debug_assert!(should_codegen_locally(tcx, &instance));

            // Keep track of the monomorphization recursion depth
            recursion_depth_reset =
                Some(check_recursion_limit(tcx, instance, starting_point.span, recursion_depths));
            check_type_length_limit(tcx, instance);

            // Walking MIR can recurse deeply through `collect_items_rec`, so
            // guard against stack overflow.
            rustc_data_structures::stack::ensure_sufficient_stack(|| {
                collect_neighbours(tcx, instance, &mut neighbors);
            });
        }
        MonoItem::GlobalAsm(..) => {
            recursion_depth_reset = None;
        }
    }

    record_accesses(tcx, starting_point.node, neighbors.iter().map(|i| &i.node), inlining_map);

    for neighbour in neighbors {
        collect_items_rec(tcx, neighbour, visited, recursion_depths, inlining_map);
    }

    // Restore the pre-visit recursion depth for this DefId.
    if let Some((def_id, depth)) = recursion_depth_reset {
        recursion_depths.insert(def_id, depth);
    }

    debug!("END collect_items_rec({})", starting_point.node);
}
/// Records into the shared `inlining_map` that `caller` accesses each item in
/// `callees`, tagging the ones that are candidates for local inlining.
fn record_accesses<'a, 'tcx: 'a>(
    tcx: TyCtxt<'tcx>,
    caller: MonoItem<'tcx>,
    callees: impl Iterator<Item = &'a MonoItem<'tcx>>,
    inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>,
) {
    // We collect this into a `SmallVec` so the `instantiation_mode` queries run
    // outside the lock.
    // FIXME: Call `instantiation_mode` when pushing to `neighbors` in
    // `collect_items_rec` instead to avoid creating this `SmallVec`.
    let accesses: SmallVec<[_; 128]> = callees
        .map(|callee| {
            // `LocalCopy` items get instantiated in every CGU that uses them,
            // which makes them inlining candidates.
            let inline = callee.instantiation_mode(tcx) == InstantiationMode::LocalCopy;
            (*callee, inline)
        })
        .collect();

    inlining_map.lock_mut().record_accesses(caller, &accesses);
}
/// Format instance name that is already known to be too long for rustc.
/// Show only the first and last 32 characters to avoid blasting
/// the user's terminal with thousands of lines of type-name.
///
/// If the type name is longer than before+after, it will be written to a file.
fn shrunk_instance_name(
    tcx: TyCtxt<'tcx>,
    instance: &Instance<'tcx>,
    before: usize,
    after: usize,
) -> (String, Option<PathBuf>) {
    let s = instance.to_string();

    // Only use the shrunk version if it's really shorter.
    // This also avoids the case where before and after slices overlap.
    if s.chars().nth(before + after + 1).is_some() {
        // An iterator of all byte positions including the end of the string.
        let positions = || s.char_indices().map(|(i, _)| i).chain(iter::once(s.len()));

        // The slices below use byte positions taken from `positions`, so they
        // always cut on char boundaries and never split a multi-byte char.
        let shrunk = format!(
            "{before}...{after}",
            before = &s[..positions().nth(before).unwrap_or(s.len())],
            after = &s[positions().rev().nth(after).unwrap_or(0)..],
        );

        let path = tcx.output_filenames(LOCAL_CRATE).temp_path_ext("long-type.txt", None);
        // Best effort: if the write fails we simply report no path.
        let written_to_path = std::fs::write(&path, s).ok().map(|_| path);

        (shrunk, written_to_path)
    } else {
        (s, None)
    }
}
/// Bumps and checks the monomorphization recursion depth for `instance`,
/// aborting compilation with a fatal error when the limit is exceeded.
///
/// Returns `(def_id, previous_depth)` so the caller can restore the depth
/// after the subtree rooted at this instance has been walked.
fn check_recursion_limit<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: Instance<'tcx>,
    span: Span,
    recursion_depths: &mut DefIdMap<usize>,
) -> (DefId, usize) {
    let def_id = instance.def_id();
    let recursion_depth = recursion_depths.get(&def_id).cloned().unwrap_or(0);
    debug!(" => recursion depth={}", recursion_depth);

    let adjusted_recursion_depth = if Some(def_id) == tcx.lang_items().drop_in_place_fn() {
        // HACK: drop_in_place creates tight monomorphization loops. Give
        // it more margin.
        recursion_depth / 4
    } else {
        recursion_depth
    };

    // Code that needs to instantiate the same function recursively
    // more than the recursion limit is assumed to be causing an
    // infinite expansion.
    if !tcx.sess.recursion_limit().value_within_limit(adjusted_recursion_depth) {
        // Elide the middle of the (enormous) instance name in the diagnostic.
        let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance, 32, 32);
        let error = format!("reached the recursion limit while instantiating `{}`", shrunk);
        let mut err = tcx.sess.struct_span_fatal(span, &error);
        err.span_note(
            tcx.def_span(def_id),
            &format!("`{}` defined here", tcx.def_path_str(def_id)),
        );
        if let Some(path) = written_to_path {
            err.note(&format!("the full type name has been written to '{}'", path.display()));
        }
        err.emit();
        FatalError.raise();
    }

    recursion_depths.insert(def_id, recursion_depth + 1);

    (def_id, recursion_depth)
}
/// Aborts compilation when the instantiated type of `instance` exceeds the
/// crate's type-length limit.
fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) {
    // Count every type and const reachable through the substitutions;
    // lifetimes don't contribute to the length.
    let type_length = instance
        .substs
        .iter()
        .flat_map(|arg| arg.walk())
        .filter(|arg| !matches!(arg.unpack(), GenericArgKind::Lifetime(_)))
        .count();
    debug!(" => type length={}", type_length);

    // Rust code can easily create exponentially-long types using only a
    // polynomial recursion depth. Even with the default recursion
    // depth, you can easily get cases that take >2^60 steps to run,
    // which means that rustc basically hangs.
    //
    // Bail out in these cases to avoid that bad user experience.
    if tcx.sess.type_length_limit().value_within_limit(type_length) {
        return;
    }

    // Elide the middle of the (enormous) instance name in the diagnostic.
    let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance, 32, 32);
    let msg = format!("reached the type-length limit while instantiating `{}`", shrunk);
    let mut diag = tcx.sess.struct_span_fatal(tcx.def_span(instance.def_id()), &msg);
    if let Some(path) = written_to_path {
        diag.note(&format!("the full type name has been written to '{}'", path.display()));
    }
    diag.help(&format!(
        "consider adding a `#![type_length_limit=\"{}\"]` attribute to your crate",
        type_length
    ));
    diag.emit();
    tcx.sess.abort_if_errors();
}
/// MIR visitor that records, into `output`, every mono item referenced by
/// the body of `instance`.
struct MirNeighborCollector<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    // The (already monomorphic) MIR body being scanned.
    body: &'a mir::Body<'tcx>,
    // Neighbors found so far, each tagged with the span that introduced it.
    output: &'a mut Vec<Spanned<MonoItem<'tcx>>>,
    instance: Instance<'tcx>,
}
impl<'a, 'tcx> MirNeighborCollector<'a, 'tcx> {
    /// Substitutes this collector's instance arguments into `value` and
    /// normalizes it (erasing regions), yielding the fully monomorphic form
    /// needed when recording neighbors.
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: TypeFoldable<'tcx>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.tcx,
            ty::ParamEnv::reveal_all(),
            value,
        )
    }
}
impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
debug!("visiting rvalue {:?}", *rvalue);
let span = self.body.source_info(location).span;
match *rvalue {
// When doing an cast from a regular pointer to a fat pointer, we
// have to instantiate all methods of the trait being cast to, so we
// can build the appropriate vtable.
mir::Rvalue::Cast(
mir::CastKind::Pointer(PointerCast::Unsize),
ref operand,
target_ty,
) => {
let target_ty = self.monomorphize(target_ty);
let source_ty = operand.ty(self.body, self.tcx);
let source_ty = self.monomorphize(source_ty);
let (source_ty, target_ty) =
find_vtable_types_for_unsizing(self.tcx, source_ty, target_ty);
// This could also be a different Unsize instruction, like
// from a fixed sized array to a slice. But we are only
// interested in things that produce a vtable.
if target_ty.is_trait() && !source_ty.is_trait() {
create_mono_items_for_vtable_methods(
self.tcx,
target_ty,
source_ty,
span,
self.output,
);
}
}
mir::Rvalue::Cast(
mir::CastKind::Pointer(PointerCast::ReifyFnPointer),
ref operand,
_,
) => {
let fn_ty = operand.ty(self.body, self.tcx);
let fn_ty = self.monomorphize(fn_ty);
visit_fn_use(self.tcx, fn_ty, false, span, &mut self.output);
}
mir::Rvalue::Cast(
mir::CastKind::Pointer(PointerCast::ClosureFnPointer(_)),
ref operand,
_,
) => {
let source_ty = operand.ty(self.body, self.tcx);
let source_ty = self.monomorphize(source_ty);
match *source_ty.kind() {
ty::Closure(def_id, substs) => {
let instance = Instance::resolve_closure(
self.tcx,
def_id,
substs,
ty::ClosureKind::FnOnce,
);
if should_codegen_locally(self.tcx, &instance) {
self.output.push(create_fn_mono_item(self.tcx, instance, span));
}
}
_ => bug!(),
}
}
mir::Rvalue::NullaryOp(mir::NullOp::Box, _) => {
let tcx = self.tcx;
let exchange_malloc_fn_def_id =
tcx.require_lang_item(LangItem::ExchangeMalloc, None);
let instance = Instance::mono(tcx, exchange_malloc_fn_def_id);
if should_codegen_locally(tcx, &instance) {
self.output.push(create_fn_mono_item(self.tcx, instance, span));
}
}
mir::Rvalue::ThreadLocalRef(def_id) => {
assert!(self.tcx.is_thread_local_static(def_id));
let instance = Instance::mono(self.tcx, def_id);
if should_codegen_locally(self.tcx, &instance) {
trace!("collecting thread-local static {:?}", def_id);
self.output.push(respan(span, MonoItem::Static(def_id)));
}
}
_ => { /* not interesting */ }
}
self.super_rvalue(rvalue, location);
}
fn visit_const(&mut self, constant: &&'tcx ty::Const<'tcx>, location: Location) {
debug!("visiting const {:?} @ {:?}", *constant, location);
let substituted_constant = self.monomorphize(*constant);
let param_env = ty::ParamEnv::reveal_all();
match substituted_constant.val {
ty::ConstKind::Value(val) => collect_const_value(self.tcx, val, self.output),
ty::ConstKind::Unevaluated(unevaluated) => {
match self.tcx.const_eval_resolve(param_env, unevaluated, None) {
Ok(val) => collect_const_value(self.tcx, val, self.output),
Err(ErrorHandled::Reported(ErrorReported) | ErrorHandled::Linted) => {}
Err(ErrorHandled::TooGeneric) => span_bug!(
self.body.source_info(location).span,
"collection encountered polymorphic constant: {}",
substituted_constant
),
}
}
_ => {}
}
self.super_const(constant);
}
    /// Collects mono items referenced by a terminator: call targets, drop
    /// glue for dropped places, and `sym` operands of inline assembly.
    fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
        debug!("visiting terminator {:?} @ {:?}", terminator, location);
        let source = self.body.source_info(location).span;
        let tcx = self.tcx;
        match terminator.kind {
            mir::TerminatorKind::Call { ref func, .. } => {
                // Direct call: monomorphize the callee type and record its use.
                let callee_ty = func.ty(self.body, tcx);
                let callee_ty = self.monomorphize(callee_ty);
                visit_fn_use(self.tcx, callee_ty, true, source, &mut self.output);
            }
            mir::TerminatorKind::Drop { ref place, .. }
            | mir::TerminatorKind::DropAndReplace { ref place, .. } => {
                // Dropping a place requires drop glue for its monomorphized type.
                let ty = place.ty(self.body, self.tcx).ty;
                let ty = self.monomorphize(ty);
                visit_drop_use(self.tcx, ty, true, source, self.output);
            }
            mir::TerminatorKind::InlineAsm { ref operands, .. } => {
                for op in operands {
                    match *op {
                        mir::InlineAsmOperand::SymFn { ref value } => {
                            // `sym fn` is used like a function pointer, not a direct call.
                            let fn_ty = self.monomorphize(value.literal.ty());
                            visit_fn_use(self.tcx, fn_ty, false, source, &mut self.output);
                        }
                        mir::InlineAsmOperand::SymStatic { def_id } => {
                            let instance = Instance::mono(self.tcx, def_id);
                            if should_codegen_locally(self.tcx, &instance) {
                                trace!("collecting asm sym static {:?}", def_id);
                                self.output.push(respan(source, MonoItem::Static(def_id)));
                            }
                        }
                        _ => {}
                    }
                }
            }
            // These terminators reference no mono items themselves.
            mir::TerminatorKind::Goto { .. }
            | mir::TerminatorKind::SwitchInt { .. }
            | mir::TerminatorKind::Resume
            | mir::TerminatorKind::Abort
            | mir::TerminatorKind::Return
            | mir::TerminatorKind::Unreachable
            | mir::TerminatorKind::Assert { .. } => {}
            // Generator and false-edge terminators must have been lowered away
            // before this pass runs.
            mir::TerminatorKind::GeneratorDrop
            | mir::TerminatorKind::Yield { .. }
            | mir::TerminatorKind::FalseEdge { .. }
            | mir::TerminatorKind::FalseUnwind { .. } => bug!(),
        }
        self.super_terminator(terminator, location);
    }
    /// Locals themselves never produce mono items, so this deliberately
    /// overrides the default traversal with a no-op.
    fn visit_local(
        &mut self,
        _place_local: &Local,
        _context: mir::visit::PlaceContext,
        _location: Location,
    ) {
    }
}
/// Records the mono items needed to drop a value of type `ty` by resolving
/// its `drop_in_place` instance and visiting that instance's uses.
fn visit_drop_use<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: Ty<'tcx>,
    is_direct_call: bool,
    source: Span,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    let drop_instance = Instance::resolve_drop_in_place(tcx, ty);
    visit_instance_use(tcx, drop_instance, is_direct_call, source, output);
}
/// If `ty` is a `FnDef`, resolves it to a concrete instance and records its
/// use. Types other than `FnDef` (e.g. existing function pointers) are
/// ignored here.
fn visit_fn_use<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: Ty<'tcx>,
    is_direct_call: bool,
    source: Span,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    if let ty::FnDef(def_id, substs) = *ty.kind() {
        let instance = if is_direct_call {
            ty::Instance::resolve(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap().unwrap()
        } else {
            // Indirect uses (reifying to a fn pointer) may need a shim.
            ty::Instance::resolve_for_fn_ptr(tcx, ty::ParamEnv::reveal_all(), def_id, substs)
                .unwrap()
        };
        visit_instance_use(tcx, instance, is_direct_call, source, output);
    }
}
/// Records a use of `instance`, pushing a mono item when it must be
/// codegenned locally. Virtual calls and intrinsics never become items of
/// their own; noop drop glue is only emitted when reified.
fn visit_instance_use<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    is_direct_call: bool,
    source: Span,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call);
    if !should_codegen_locally(tcx, &instance) {
        return;
    }
    match instance.def {
        ty::InstanceDef::Virtual(..) | ty::InstanceDef::Intrinsic(_) => {
            // These can only be called directly; taking their address is a bug.
            if !is_direct_call {
                bug!("{:?} being reified", instance);
            }
        }
        ty::InstanceDef::DropGlue(_, None) => {
            // Don't need to emit noop drop glue if we are calling directly.
            if !is_direct_call {
                output.push(create_fn_mono_item(tcx, instance, source));
            }
        }
        ty::InstanceDef::DropGlue(_, Some(_))
        | ty::InstanceDef::VtableShim(..)
        | ty::InstanceDef::ReifyShim(..)
        | ty::InstanceDef::ClosureOnceShim { .. }
        | ty::InstanceDef::Item(..)
        | ty::InstanceDef::FnPtrShim(..)
        | ty::InstanceDef::CloneShim(..) => {
            output.push(create_fn_mono_item(tcx, instance, source));
        }
    }
}
// Returns `true` if we should codegen an instance in the local crate.
// Returns `false` if we can just link to the upstream crate and therefore don't
// need a mono item.
fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) -> bool {
    let def_id = match instance.def {
        ty::InstanceDef::Item(def) => def.did,
        ty::InstanceDef::DropGlue(def_id, Some(_)) => def_id,
        // Shims, intrinsics and noop drop glue (`DropGlue(_, None)` is
        // caught here since the `Some` case matched above) are always
        // generated locally.
        ty::InstanceDef::VtableShim(..)
        | ty::InstanceDef::ReifyShim(..)
        | ty::InstanceDef::ClosureOnceShim { .. }
        | ty::InstanceDef::Virtual(..)
        | ty::InstanceDef::FnPtrShim(..)
        | ty::InstanceDef::DropGlue(..)
        | ty::InstanceDef::Intrinsic(_)
        | ty::InstanceDef::CloneShim(..) => return true,
    };
    if tcx.is_foreign_item(def_id) {
        // Foreign items are always linked against, there's no way of instantiating them.
        return false;
    }
    if def_id.is_local() {
        // Local items cannot be referred to locally without monomorphizing them locally.
        return true;
    }
    if tcx.is_reachable_non_generic(def_id)
        || instance.polymorphize(tcx).upstream_monomorphization(tcx).is_some()
    {
        // We can link to the item in question, no instance needed in this crate.
        return false;
    }
    if !tcx.is_mir_available(def_id) {
        bug!("no MIR available for {:?}", def_id);
    }
    true
}
/// For a given pair of source and target type that occur in an unsizing coercion,
/// this function finds the pair of types that determines the vtable linking
/// them.
///
/// For example, the source type might be `&SomeStruct` and the target type\
/// might be `&SomeTrait` in a cast like:
///
/// let src: &SomeStruct = ...;
/// let target = src as &SomeTrait;
///
/// Then the output of this function would be (SomeStruct, SomeTrait) since for
/// constructing the `target` fat-pointer we need the vtable for that pair.
///
/// Things can get more complicated though because there's also the case where
/// the unsized type occurs as a field:
///
/// ```rust
/// struct ComplexStruct<T: ?Sized> {
/// a: u32,
/// b: f64,
/// c: T
/// }
/// ```
///
/// In this case, if `T` is sized, `&ComplexStruct<T>` is a thin pointer. If `T`
/// is unsized, `&SomeStruct` is a fat pointer, and the vtable it points to is
/// for the pair of `T` (which is a trait) and the concrete type that `T` was
/// originally coerced from:
///
/// let src: &ComplexStruct<SomeStruct> = ...;
/// let target = src as &ComplexStruct<SomeTrait>;
///
/// Again, we want this `find_vtable_types_for_unsizing()` to provide the pair
/// `(SomeStruct, SomeTrait)`.
///
/// Finally, there is also the case of custom unsizing coercions, e.g., for
/// smart pointers such as `Rc` and `Arc`.
fn find_vtable_types_for_unsizing<'tcx>(
    tcx: TyCtxt<'tcx>,
    source_ty: Ty<'tcx>,
    target_ty: Ty<'tcx>,
) -> (Ty<'tcx>, Ty<'tcx>) {
    // Given the pointees of an unsizing pointer cast, walk to the pair of
    // tail types that actually determines the vtable.
    let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
        let param_env = ty::ParamEnv::reveal_all();
        // `true` iff a pointer to `ty` is a fat pointer (carries metadata).
        let type_has_metadata = |ty: Ty<'tcx>| -> bool {
            if ty.is_sized(tcx.at(DUMMY_SP), param_env) {
                return false;
            }
            let tail = tcx.struct_tail_erasing_lifetimes(ty, param_env);
            match tail.kind() {
                ty::Foreign(..) => false,
                ty::Str | ty::Slice(..) | ty::Dynamic(..) => true,
                _ => bug!("unexpected unsized tail: {:?}", tail),
            }
        };
        if type_has_metadata(inner_source) {
            // Already unsized on both sides; the pair is the vtable key itself.
            (inner_source, inner_target)
        } else {
            // Sized -> unsized: dig down to the differing tail types.
            tcx.struct_lockstep_tails_erasing_lifetimes(inner_source, inner_target, param_env)
        }
    };
    match (&source_ty.kind(), &target_ty.kind()) {
        (&ty::Ref(_, a, _), &ty::Ref(_, b, _) | &ty::RawPtr(ty::TypeAndMut { ty: b, .. }))
        | (&ty::RawPtr(ty::TypeAndMut { ty: a, .. }), &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) => {
            ptr_vtable(a, b)
        }
        (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
            ptr_vtable(source_ty.boxed_ty(), target_ty.boxed_ty())
        }
        // Custom unsizing coercion (e.g. `Rc<T> -> Rc<dyn Trait>`): recurse
        // into the single field that changes.
        (&ty::Adt(source_adt_def, source_substs), &ty::Adt(target_adt_def, target_substs)) => {
            assert_eq!(source_adt_def, target_adt_def);
            let CustomCoerceUnsized::Struct(coerce_index) =
                monomorphize::custom_coerce_unsize_info(tcx, source_ty, target_ty);
            let source_fields = &source_adt_def.non_enum_variant().fields;
            let target_fields = &target_adt_def.non_enum_variant().fields;
            assert!(
                coerce_index < source_fields.len() && source_fields.len() == target_fields.len()
            );
            find_vtable_types_for_unsizing(
                tcx,
                source_fields[coerce_index].ty(tcx, source_substs),
                target_fields[coerce_index].ty(tcx, target_substs),
            )
        }
        _ => bug!(
            "find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}",
            source_ty,
            target_ty
        ),
    }
}
/// Wraps `instance` (after polymorphization) into a `MonoItem::Fn` tagged
/// with the `source` span for diagnostics.
fn create_fn_mono_item<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: Instance<'tcx>,
    source: Span,
) -> Spanned<MonoItem<'tcx>> {
    debug!("create_fn_mono_item(instance={})", instance);
    let mono_item = MonoItem::Fn(instance.polymorphize(tcx));
    respan(source, mono_item)
}
/// Creates a `MonoItem` for each method that is referenced by the vtable for
/// the given trait/impl pair.
fn create_mono_items_for_vtable_methods<'tcx>(
    tcx: TyCtxt<'tcx>,
    trait_ty: Ty<'tcx>,
    impl_ty: Ty<'tcx>,
    source: Span,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    assert!(!trait_ty.has_escaping_bound_vars() && !impl_ty.has_escaping_bound_vars());
    if let ty::Dynamic(ref trait_ty, ..) = trait_ty.kind() {
        if let Some(principal) = trait_ty.principal() {
            let poly_trait_ref = principal.with_self_ty(tcx, impl_ty);
            assert!(!poly_trait_ref.has_escaping_bound_vars());
            // Walk all methods of the trait, including those of its supertraits
            let methods = tcx.vtable_methods(poly_trait_ref);
            let methods = methods
                .iter()
                .cloned()
                // `None` entries are vtable slots with no method (e.g. not
                // object-safe for this receiver); skip them.
                .filter_map(|method| method)
                .map(|(def_id, substs)| {
                    ty::Instance::resolve_for_vtable(
                        tcx,
                        ty::ParamEnv::reveal_all(),
                        def_id,
                        substs,
                    )
                    .unwrap()
                })
                .filter(|&instance| should_codegen_locally(tcx, &instance))
                .map(|item| create_fn_mono_item(tcx, item, source));
            output.extend(methods);
        }
        // Also add the destructor.
        visit_drop_use(tcx, impl_ty, false, source, output);
    }
}
//=-----------------------------------------------------------------------------
// Root Collection
//=-----------------------------------------------------------------------------
/// Walks item-likes of the crate to find the mono item "roots": the seeds
/// from which neighbor collection discovers everything else.
struct RootCollector<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    // Eager vs. lazy collection strategy; see `MonoItemCollectionMode` uses below.
    mode: MonoItemCollectionMode,
    // Discovered roots are appended here.
    output: &'a mut Vec<Spanned<MonoItem<'tcx>>>,
    // The crate entry function, if any; used by `push_extra_entry_roots`.
    entry_fn: Option<(LocalDefId, EntryFnType)>,
}
impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> {
    /// Inspects one top-level item and records any roots it contributes.
    fn visit_item(&mut self, item: &'v hir::Item<'v>) {
        match item.kind {
            hir::ItemKind::ExternCrate(..)
            | hir::ItemKind::Use(..)
            | hir::ItemKind::ForeignMod { .. }
            | hir::ItemKind::TyAlias(..)
            | hir::ItemKind::Trait(..)
            | hir::ItemKind::TraitAlias(..)
            | hir::ItemKind::OpaqueTy(..)
            | hir::ItemKind::Mod(..) => {
                // Nothing to do, just keep recursing.
            }
            hir::ItemKind::Impl { .. } => {
                // In eager mode, default trait methods of non-generic impls
                // are collected even without callers.
                if self.mode == MonoItemCollectionMode::Eager {
                    create_mono_items_for_default_impls(self.tcx, item, self.output);
                }
            }
            hir::ItemKind::Enum(_, ref generics)
            | hir::ItemKind::Struct(_, ref generics)
            | hir::ItemKind::Union(_, ref generics) => {
                // Only non-generic ADTs can have their drop glue collected
                // eagerly; generic ones depend on instantiation.
                if generics.params.is_empty() {
                    if self.mode == MonoItemCollectionMode::Eager {
                        debug!(
                            "RootCollector: ADT drop-glue for {}",
                            self.tcx.def_path_str(item.def_id.to_def_id())
                        );
                        let ty = Instance::new(item.def_id.to_def_id(), InternalSubsts::empty())
                            .ty(self.tcx, ty::ParamEnv::reveal_all());
                        visit_drop_use(self.tcx, ty, true, DUMMY_SP, self.output);
                    }
                }
            }
            hir::ItemKind::GlobalAsm(..) => {
                debug!(
                    "RootCollector: ItemKind::GlobalAsm({})",
                    self.tcx.def_path_str(item.def_id.to_def_id())
                );
                self.output.push(dummy_spanned(MonoItem::GlobalAsm(item.item_id())));
            }
            hir::ItemKind::Static(..) => {
                debug!(
                    "RootCollector: ItemKind::Static({})",
                    self.tcx.def_path_str(item.def_id.to_def_id())
                );
                self.output.push(dummy_spanned(MonoItem::Static(item.def_id.to_def_id())));
            }
            hir::ItemKind::Const(..) => {
                // const items only generate mono items if they are
                // actually used somewhere. Just declaring them is insufficient.
                // but even just declaring them must collect the items they refer to
                if let Ok(val) = self.tcx.const_eval_poly(item.def_id.to_def_id()) {
                    collect_const_value(self.tcx, val, &mut self.output);
                }
            }
            hir::ItemKind::Fn(..) => {
                self.push_if_root(item.def_id);
            }
        }
    }
    fn visit_trait_item(&mut self, _: &'v hir::TraitItem<'v>) {
        // Even if there's a default body with no explicit generics,
        // it's still generic over some `Self: Trait`, so not a root.
    }
    /// Impl methods can be roots when they are non-generic.
    fn visit_impl_item(&mut self, ii: &'v hir::ImplItem<'v>) {
        if let hir::ImplItemKind::Fn(hir::FnSig { .. }, _) = ii.kind {
            self.push_if_root(ii.def_id);
        }
    }
    // Foreign items are linked against, never codegenned locally.
    fn visit_foreign_item(&mut self, _foreign_item: &'v hir::ForeignItem<'v>) {}
}
impl RootCollector<'_, 'v> {
    /// Returns `true` if `def_id` is a collection root: it must be
    /// monomorphic, and (in lazy mode) externally reachable, the entry
    /// function, or a std-internal symbol.
    fn is_root(&self, def_id: LocalDefId) -> bool {
        !item_requires_monomorphization(self.tcx, def_id)
            && match self.mode {
                MonoItemCollectionMode::Eager => true,
                MonoItemCollectionMode::Lazy => {
                    self.entry_fn.map(|(id, _)| id) == Some(def_id)
                        || self.tcx.is_reachable_non_generic(def_id)
                        || self
                            .tcx
                            .codegen_fn_attrs(def_id)
                            .flags
                            .contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
                }
            }
    }
    /// If `def_id` represents a root, pushes it onto the list of
    /// outputs. (Note that all roots must be monomorphic.)
    fn push_if_root(&mut self, def_id: LocalDefId) {
        if self.is_root(def_id) {
            debug!("RootCollector::push_if_root: found root def_id={:?}", def_id);
            let instance = Instance::mono(self.tcx, def_id.to_def_id());
            self.output.push(create_fn_mono_item(self.tcx, instance, DUMMY_SP));
        }
    }
    /// As a special case, when/if we encounter the
    /// `main()` function, we also have to generate a
    /// monomorphized copy of the start lang item based on
    /// the return type of `main`. This is not needed when
    /// the user writes their own `start` manually.
    fn push_extra_entry_roots(&mut self) {
        let main_def_id = match self.entry_fn {
            Some((def_id, EntryFnType::Main)) => def_id,
            _ => return,
        };
        let start_def_id = match self.tcx.lang_items().require(LangItem::Start) {
            Ok(s) => s,
            Err(err) => self.tcx.sess.fatal(&err),
        };
        let main_ret_ty = self.tcx.fn_sig(main_def_id).output();
        // Given that `main()` has no arguments,
        // then its return type cannot have
        // late-bound regions, since late-bound
        // regions must appear in the argument
        // listing.
        let main_ret_ty = self.tcx.erase_regions(main_ret_ty.no_bound_vars().unwrap());
        // Instantiate `start` with `main`'s return type as its generic argument.
        let start_instance = Instance::resolve(
            self.tcx,
            ty::ParamEnv::reveal_all(),
            start_def_id,
            self.tcx.intern_substs(&[main_ret_ty.into()]),
        )
        .unwrap()
        .unwrap();
        self.output.push(create_fn_mono_item(self.tcx, start_instance, DUMMY_SP));
    }
}
/// Returns `true` when `def_id` has generic parameters that still require
/// monomorphization, i.e. it cannot serve as a collection root by itself.
fn item_requires_monomorphization(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
    tcx.generics_of(def_id).requires_monomorphization(tcx)
}
/// For a non-generic trait impl, eagerly collects mono items for every
/// provided (default) trait method the impl does not override.
fn create_mono_items_for_default_impls<'tcx>(
    tcx: TyCtxt<'tcx>,
    item: &'tcx hir::Item<'tcx>,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    match item.kind {
        hir::ItemKind::Impl(ref impl_) => {
            // Bail out on generic impls: their methods can only be
            // instantiated per call site.
            for param in impl_.generics.params {
                match param.kind {
                    hir::GenericParamKind::Lifetime { .. } => {}
                    hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
                        return;
                    }
                }
            }
            debug!(
                "create_mono_items_for_default_impls(item={})",
                tcx.def_path_str(item.def_id.to_def_id())
            );
            if let Some(trait_ref) = tcx.impl_trait_ref(item.def_id) {
                let param_env = ty::ParamEnv::reveal_all();
                let trait_ref = tcx.normalize_erasing_regions(param_env, trait_ref);
                let overridden_methods: FxHashSet<_> =
                    impl_.items.iter().map(|iiref| iiref.ident.normalize_to_macros_2_0()).collect();
                for method in tcx.provided_trait_methods(trait_ref.def_id) {
                    // Skip methods this impl overrides, and default bodies
                    // with their own generics (they are not monomorphic here).
                    if overridden_methods.contains(&method.ident.normalize_to_macros_2_0()) {
                        continue;
                    }
                    if tcx.generics_of(method.def_id).own_requires_monomorphization() {
                        continue;
                    }
                    // Substitute the impl's trait args; lifetimes are erased.
                    let substs =
                        InternalSubsts::for_item(tcx, method.def_id, |param, _| match param.kind {
                            GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
                            GenericParamDefKind::Type { .. } | GenericParamDefKind::Const => {
                                trait_ref.substs[param.index as usize]
                            }
                        });
                    let instance = ty::Instance::resolve(tcx, param_env, method.def_id, substs)
                        .unwrap()
                        .unwrap();
                    let mono_item = create_fn_mono_item(tcx, instance, DUMMY_SP);
                    if mono_item.node.is_instantiable(tcx) && should_codegen_locally(tcx, &instance)
                    {
                        output.push(mono_item);
                    }
                }
            }
        }
        _ => bug!(),
    }
}
/// Scans the miri alloc in order to find function calls, closures, and drop-glue.
fn collect_miri<'tcx>(
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    match tcx.global_alloc(alloc_id) {
        GlobalAlloc::Static(def_id) => {
            assert!(!tcx.is_thread_local_static(def_id));
            let instance = Instance::mono(tcx, def_id);
            if should_codegen_locally(tcx, &instance) {
                trace!("collecting static {:?}", def_id);
                output.push(dummy_spanned(MonoItem::Static(def_id)));
            }
        }
        GlobalAlloc::Memory(alloc) => {
            trace!("collecting {:?} with {:#?}", alloc_id, alloc);
            // Recurse into every pointer stored inside this allocation.
            // Guard against stack overflow on deeply nested allocations.
            for &((), inner) in alloc.relocations().values() {
                rustc_data_structures::stack::ensure_sufficient_stack(|| {
                    collect_miri(tcx, inner, output);
                });
            }
        }
        GlobalAlloc::Function(fn_instance) => {
            if should_codegen_locally(tcx, &fn_instance) {
                trace!("collecting {:?} with {:#?}", alloc_id, fn_instance);
                output.push(create_fn_mono_item(tcx, fn_instance, DUMMY_SP));
            }
        }
    }
}
/// Scans the MIR in order to find function calls, closures, and drop-glue.
fn collect_neighbours<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: Instance<'tcx>,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    debug!("collect_neighbours: {:?}", instance.def_id());
    let body = tcx.instance_mir(instance.def);
    // Delegate the actual traversal to the MIR visitor defined above.
    MirNeighborCollector { tcx, body: &body, output, instance }.visit_body(&body);
}
/// Scans an evaluated constant value for embedded pointers to functions or
/// statics and collects the allocations they refer to.
fn collect_const_value<'tcx>(
    tcx: TyCtxt<'tcx>,
    value: ConstValue<'tcx>,
    output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
    match value {
        // A single pointer scalar: follow its allocation.
        ConstValue::Scalar(Scalar::Ptr(ptr)) => collect_miri(tcx, ptr.alloc_id, output),
        // Indirect values: follow every relocation in the backing allocation.
        ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => {
            for &((), id) in alloc.relocations().values() {
                collect_miri(tcx, id, output);
            }
        }
        _ => {}
    }
}
| 38.999204 | 101 | 0.586696 |
9b85708f34c2eee115c1605a0a337e856c2f12a2 | 2,480 | use types::{BeaconBlock, BeaconState, Checkpoint, EthSpec, Hash256, Slot};
/// Approximates the `Store` in "Ethereum 2.0 Phase 0 -- Beacon Chain Fork Choice":
///
/// https://github.com/ethereum/eth2.0-specs/blob/v0.12.1/specs/phase0/fork-choice.md#store
///
/// ## Detail
///
/// This is only an approximation for two reasons:
///
/// - This crate stores the actual block DAG in `ProtoArrayForkChoice`.
/// - `time` is represented using `Slot` instead of UNIX epoch `u64`.
///
/// ## Motiviation
///
/// The primary motivation for defining this as a trait to be implemented upstream rather than a
/// concrete struct is to allow this crate to be free from "impure" on-disk database logic,
/// hopefully making auditing easier.
pub trait ForkChoiceStore<T: EthSpec>: Sized {
    /// Error type returned by fallible store operations.
    type Error;
    /// Returns the last value passed to `Self::set_current_slot`.
    fn get_current_slot(&self) -> Slot;
    /// Set the value to be returned by `Self::get_current_slot`.
    ///
    /// ## Notes
    ///
    /// This should only ever be called from within `ForkChoice::on_tick`.
    fn set_current_slot(&mut self, slot: Slot);
    /// Called whenever `ForkChoice::on_block` has verified a block, but not yet added it to fork
    /// choice. Allows the implementer to perform caching or other housekeeping duties.
    fn on_verified_block(
        &mut self,
        block: &BeaconBlock<T>,
        block_root: Hash256,
        state: &BeaconState<T>,
    ) -> Result<(), Self::Error>;
    /// Returns the `justified_checkpoint`.
    fn justified_checkpoint(&self) -> &Checkpoint;
    /// Returns balances from the `state` identified by `justified_checkpoint.root`.
    fn justified_balances(&self) -> &[u64];
    /// Returns the `best_justified_checkpoint`.
    fn best_justified_checkpoint(&self) -> &Checkpoint;
    /// Returns the `finalized_checkpoint`.
    fn finalized_checkpoint(&self) -> &Checkpoint;
    /// Returns the `proposer_boost_root`.
    fn proposer_boost_root(&self) -> Hash256;
    /// Sets `finalized_checkpoint`.
    fn set_finalized_checkpoint(&mut self, checkpoint: Checkpoint);
    /// Sets the `justified_checkpoint`.
    fn set_justified_checkpoint(&mut self, checkpoint: Checkpoint) -> Result<(), Self::Error>;
    /// Sets the `best_justified_checkpoint`.
    fn set_best_justified_checkpoint(&mut self, checkpoint: Checkpoint);
    /// Sets the proposer boost root.
    fn set_proposer_boost_root(&mut self, proposer_boost_root: Hash256);
}
| 36.470588 | 97 | 0.688306 |
167756d770dab651ab7e58c85eac2c4ca4bffab9 | 37,747 | // Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use crossbeam::atomic::AtomicCell;
#[cfg(feature = "prost-codec")]
use kvproto::cdcpb::{
event::{
row::OpType as EventRowOpType, Entries as EventEntries, Event as Event_oneof_event,
LogType as EventLogType, Row as EventRow,
},
Compatibility, DuplicateRequest as ErrorDuplicateRequest, Error as EventError, Event,
};
#[cfg(not(feature = "prost-codec"))]
use kvproto::cdcpb::{
Compatibility, DuplicateRequest as ErrorDuplicateRequest, Error as EventError, Event,
EventEntries, EventLogType, EventRow, EventRowOpType, Event_oneof_event,
};
use kvproto::errorpb;
use kvproto::kvrpcpb::ExtraOp as TxnExtraOp;
use kvproto::metapb::{Region, RegionEpoch};
use kvproto::raft_cmdpb::{AdminCmdType, AdminRequest, AdminResponse, CmdType, Request};
use raftstore::coprocessor::{Cmd, CmdBatch};
use raftstore::store::fsm::ObserveID;
use raftstore::store::util::compare_region_epoch;
use raftstore::Error as RaftStoreError;
use resolved_ts::Resolver;
use tikv::storage::txn::TxnEntry;
use tikv::storage::Statistics;
use tikv_util::collections::HashMap;
use tikv_util::mpsc::batch::Sender as BatchSender;
use tikv_util::time::Instant;
use txn_types::{Key, Lock, LockType, TimeStamp, WriteRef, WriteType};
use crate::endpoint::{OldValueCache, OldValueCallback};
use crate::metrics::*;
use crate::service::{CdcEvent, ConnID};
use crate::{Error, Result};
// Rows exceeding this total size are split across multiple event batches
// (see `Delegate::on_scan`).
const EVENT_MAX_SIZE: usize = 6 * 1024 * 1024; // 6MB
// Process-wide counter backing `DownstreamID::new`.
static DOWNSTREAM_ID_ALLOC: AtomicUsize = AtomicUsize::new(0);
/// A unique identifier of a Downstream.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct DownstreamID(usize);
impl DownstreamID {
    /// Allocates the next process-wide unique ID from a global atomic counter.
    pub fn new() -> DownstreamID {
        DownstreamID(DOWNSTREAM_ID_ALLOC.fetch_add(1, Ordering::SeqCst))
    }
}
/// Lifecycle state of a downstream subscription.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum DownstreamState {
    // Initial state; not yet eligible for normal-only broadcasts.
    Uninitialized,
    // Fully initialized; receives all broadcast events.
    Normal,
    // Unsubscribed or the region met an error; skipped by normal-only
    // broadcasts (see `Delegate::broadcast`).
    Stopped,
}
impl Default for DownstreamState {
    fn default() -> Self {
        Self::Uninitialized
    }
}
#[derive(Clone)]
pub struct Downstream {
    // TODO: include cdc request.
    /// A unique identifier of the Downstream.
    id: DownstreamID,
    // The request ID set by CDC to identify events corresponding different requests.
    req_id: u64,
    conn_id: ConnID,
    // The IP address of downstream.
    peer: String,
    region_epoch: RegionEpoch,
    // Channel to the downstream; `None` until `set_sink` is called, in which
    // case `sink_event` drops events.
    sink: Option<BatchSender<CdcEvent>>,
    // Shared lifecycle state; also read by the delegate when broadcasting.
    state: Arc<AtomicCell<DownstreamState>>,
}
impl Downstream {
    /// Creates a `Downstream`.
    ///
    /// peer is the address of the downstream.
    /// sink sends data to the downstream.
    pub fn new(
        peer: String,
        region_epoch: RegionEpoch,
        req_id: u64,
        conn_id: ConnID,
    ) -> Downstream {
        Downstream {
            id: DownstreamID::new(),
            req_id,
            conn_id,
            peer,
            region_epoch,
            // The sink is attached later via `set_sink`.
            sink: None,
            state: Arc::new(AtomicCell::new(DownstreamState::default())),
        }
    }
    /// Sink events to the downstream.
    /// The size of `Error` and `ResolvedTS` are considered zero.
    ///
    /// Best-effort: events are dropped (with a log) when no sink is attached
    /// or the channel is disconnected/full.
    pub fn sink_event(&self, mut event: Event) {
        event.set_request_id(self.req_id);
        if self.sink.is_none() {
            info!("drop event, no sink";
                "conn_id" => ?self.conn_id, "downstream_id" => ?self.id);
            return;
        }
        let sink = self.sink.as_ref().unwrap();
        if let Err(e) = sink.try_send(CdcEvent::Event(event)) {
            match e {
                crossbeam::TrySendError::Disconnected(_) => {
                    debug!("send event failed, disconnected";
                        "conn_id" => ?self.conn_id, "downstream_id" => ?self.id);
                }
                crossbeam::TrySendError::Full(_) => {
                    info!("send event failed, full";
                        "conn_id" => ?self.conn_id, "downstream_id" => ?self.id);
                }
            }
        }
    }
    /// Attaches the channel used by `sink_event`.
    pub fn set_sink(&mut self, sink: BatchSender<CdcEvent>) {
        self.sink = Some(sink);
    }
    pub fn get_id(&self) -> DownstreamID {
        self.id
    }
    /// Returns the shared state cell (cloned `Arc`).
    pub fn get_state(&self) -> Arc<AtomicCell<DownstreamState>> {
        self.state.clone()
    }
    pub fn get_conn_id(&self) -> ConnID {
        self.conn_id
    }
    /// Sends a `DuplicateRequest` error event for `region_id` downstream.
    pub fn sink_duplicate_error(&self, region_id: u64) {
        let mut change_data_event = Event::default();
        let mut cdc_err = EventError::default();
        let mut err = ErrorDuplicateRequest::default();
        err.set_region_id(region_id);
        cdc_err.set_duplicate_request(err);
        change_data_event.event = Some(Event_oneof_event::Error(cdc_err));
        change_data_event.region_id = region_id;
        self.sink_event(change_data_event);
    }
    // TODO: merge it into Delegate::error_event.
    /// Sends a `Compatibility` error event for `region_id` downstream.
    pub fn sink_compatibility_error(&self, region_id: u64, compat: Compatibility) {
        let mut change_data_event = Event::default();
        let mut cdc_err = EventError::default();
        cdc_err.set_compatibility(compat);
        change_data_event.event = Some(Event_oneof_event::Error(cdc_err));
        change_data_event.region_id = region_id;
        self.sink_event(change_data_event);
    }
}
/// State buffered while a delegate's resolver/region is not yet ready.
#[derive(Default)]
struct Pending {
    pub downstreams: Vec<Downstream>,
    pub locks: Vec<PendingLock>,
    // Bytes accounted for the buffered state and mirrored into
    // CDC_PENDING_BYTES_GAUGE — exactly what it covers is set by callers
    // outside this view; TODO confirm.
    pub pending_bytes: usize,
}
impl Drop for Pending {
    fn drop(&mut self) {
        // Keep the pending-bytes metric in sync when buffered state is released.
        CDC_PENDING_BYTES_GAUGE.sub(self.pending_bytes as i64);
    }
}
impl Pending {
    /// Moves the buffered downstreams out, leaving an empty vector behind.
    fn take_downstreams(&mut self) -> Vec<Downstream> {
        mem::take(&mut self.downstreams)
    }
    /// Moves the buffered lock operations out, leaving an empty vector behind.
    fn take_locks(&mut self) -> Vec<PendingLock> {
        mem::take(&mut self.locks)
    }
}
/// A deferred resolver operation, replayed in `Delegate::on_region_ready`
/// once the resolver exists.
enum PendingLock {
    // Corresponds to `Resolver::track_lock`.
    Track {
        key: Vec<u8>,
        start_ts: TimeStamp,
    },
    // Corresponds to `Resolver::untrack_lock`; `commit_ts` is `None` for
    // rollbacks.
    Untrack {
        key: Vec<u8>,
        start_ts: TimeStamp,
        commit_ts: Option<TimeStamp>,
    },
}
/// A CDC delegate of a raftstore region peer.
///
/// It converts raft commands into CDC events and broadcast to downstreams.
/// It also tracks transactions on the fly in order to compute resolved ts.
pub struct Delegate {
    pub id: ObserveID,
    pub region_id: u64,
    // `None` until `on_region_ready`; used for epoch checks in `subscribe`.
    region: Option<Region>,
    pub downstreams: Vec<Downstream>,
    // `None` until `on_region_ready` installs it.
    pub resolver: Option<Resolver>,
    // Buffered downstreams/locks while the resolver is not ready; taken in
    // `on_region_ready`.
    pending: Option<Pending>,
    // Shared flag: true while at least one downstream is subscribed.
    enabled: Arc<AtomicBool>,
    failed: bool,
    pub txn_extra_op: TxnExtraOp,
}
impl Delegate {
    /// Creates a `Delegate` for the given region, initially pending (no
    /// resolver or region info yet).
    pub fn new(region_id: u64) -> Delegate {
        Delegate {
            region_id,
            id: ObserveID::new(),
            downstreams: Vec::new(),
            resolver: None,
            region: None,
            pending: Some(Pending::default()),
            enabled: Arc::new(AtomicBool::new(true)),
            failed: false,
            txn_extra_op: TxnExtraOp::default(),
        }
    }
    /// Returns a shared flag.
    /// True if there are some active downstreams subscribe the region.
    /// False if all downstreams has unsubscribed.
    pub fn enabled(&self) -> Arc<AtomicBool> {
        self.enabled.clone()
    }
    /// Registers a downstream; returns `false` if the subscribe failed.
    ///
    /// If the region is known, the downstream's epoch is validated first and
    /// an error event is sent back on mismatch; otherwise the downstream is
    /// buffered until `on_region_ready`.
    pub fn subscribe(&mut self, downstream: Downstream) -> bool {
        if let Some(region) = self.region.as_ref() {
            if let Err(e) = compare_region_epoch(
                &downstream.region_epoch,
                region,
                false, /* check_conf_ver */
                true,  /* check_ver */
                true,  /* include_region */
            ) {
                info!("fail to subscribe downstream";
                    "region_id" => region.get_id(),
                    "downstream_id" => ?downstream.get_id(),
                    "conn_id" => ?downstream.get_conn_id(),
                    "req_id" => downstream.req_id,
                    "err" => ?e);
                let err = Error::Request(e.into());
                let change_data_error = self.error_event(err);
                downstream.sink_event(change_data_error);
                return false;
            }
            self.downstreams.push(downstream);
        } else {
            self.pending.as_mut().unwrap().downstreams.push(downstream);
        }
        true
    }
    /// Looks up an active (non-pending) downstream by ID.
    pub fn downstream(&self, downstream_id: DownstreamID) -> Option<&Downstream> {
        self.downstreams.iter().find(|d| d.id == downstream_id)
    }
    /// Returns the pending downstreams while the delegate is not ready,
    /// otherwise the active ones.
    pub fn downstreams(&self) -> &Vec<Downstream> {
        if self.pending.is_some() {
            &self.pending.as_ref().unwrap().downstreams
        } else {
            &self.downstreams
        }
    }
    /// Mutable counterpart of `downstreams`.
    pub fn downstreams_mut(&mut self) -> &mut Vec<Downstream> {
        if self.pending.is_some() {
            &mut self.pending.as_mut().unwrap().downstreams
        } else {
            &mut self.downstreams
        }
    }
    /// Removes the downstream identified by `id`, optionally sending it an
    /// error event first, and marks it `Stopped`.
    ///
    /// Returns `true` when this was the last downstream, in which case the
    /// shared `enabled` flag is cleared.
    pub fn unsubscribe(&mut self, id: DownstreamID, err: Option<Error>) -> bool {
        let change_data_error = err.map(|err| self.error_event(err));
        let downstreams = self.downstreams_mut();
        downstreams.retain(|d| {
            if d.id == id {
                if let Some(change_data_error) = change_data_error.clone() {
                    d.sink_event(change_data_error);
                }
                d.state.store(DownstreamState::Stopped);
            }
            d.id != id
        });
        let is_last = downstreams.is_empty();
        if is_last {
            self.enabled.store(false, Ordering::SeqCst);
        }
        is_last
    }
    /// Converts an internal error into a CDC error `Event` for this region.
    /// Unclassified errors are reported as `RegionNotFound`.
    fn error_event(&self, err: Error) -> Event {
        let mut change_data_event = Event::default();
        let mut cdc_err = EventError::default();
        let mut err = err.extract_error_header();
        if err.has_not_leader() {
            let not_leader = err.take_not_leader();
            cdc_err.set_not_leader(not_leader);
        } else if err.has_epoch_not_match() {
            let epoch_not_match = err.take_epoch_not_match();
            cdc_err.set_epoch_not_match(epoch_not_match);
        } else {
            // TODO: Add more errors to the cdc protocol
            let mut region_not_found = errorpb::RegionNotFound::default();
            region_not_found.set_region_id(self.region_id);
            cdc_err.set_region_not_found(region_not_found);
        }
        change_data_event.event = Some(Event_oneof_event::Error(cdc_err));
        change_data_event.region_id = self.region_id;
        change_data_event
    }
    /// Marks the delegate as having met an unrecoverable error.
    pub fn mark_failed(&mut self) {
        self.failed = true;
    }
    /// Whether `mark_failed` has been called.
    pub fn has_failed(&self) -> bool {
        self.failed
    }
    /// Stop the delegate
    ///
    /// This means the region has met an unrecoverable error for CDC.
    /// It broadcasts errors to all downstream and stops.
    pub fn stop(&mut self, err: Error) {
        self.mark_failed();
        // Stop observe further events.
        self.enabled.store(false, Ordering::SeqCst);
        info!("region met error";
            "region_id" => self.region_id, "error" => ?err);
        let change_data_err = self.error_event(err);
        // Mark every downstream stopped before broadcasting the error.
        for d in &self.downstreams {
            d.state.store(DownstreamState::Stopped);
        }
        self.broadcast(change_data_err, false);
    }
fn broadcast(&self, change_data_event: Event, normal_only: bool) {
let downstreams = self.downstreams();
assert!(
!downstreams.is_empty(),
"region {} miss downstream, event: {:?}",
self.region_id,
change_data_event,
);
for i in 0..downstreams.len() - 1 {
if normal_only && downstreams[i].state.load() != DownstreamState::Normal {
continue;
}
downstreams[i].sink_event(change_data_event.clone());
}
downstreams.last().unwrap().sink_event(change_data_event);
}
    /// Install a resolver and return pending downstreams.
    ///
    /// Replays all buffered lock operations into the resolver and switches
    /// the delegate from the pending state to the ready state. Panics if a
    /// resolver was already installed.
    pub fn on_region_ready(&mut self, mut resolver: Resolver, region: Region) -> Vec<Downstream> {
        assert!(
            self.resolver.is_none(),
            "region {} resolver should not be ready",
            self.region_id,
        );
        // Mark the delegate as initialized.
        self.region = Some(region);
        let mut pending = self.pending.take().unwrap();
        // Replay lock operations that arrived before the resolver existed.
        for lock in pending.take_locks() {
            match lock {
                PendingLock::Track { key, start_ts } => resolver.track_lock(start_ts, key),
                PendingLock::Untrack {
                    key,
                    start_ts,
                    commit_ts,
                } => resolver.untrack_lock(start_ts, commit_ts, key),
            }
        }
        self.resolver = Some(resolver);
        info!("region is ready"; "region_id" => self.region_id);
        pending.take_downstreams()
    }
    /// Try advance and broadcast resolved ts.
    ///
    /// Returns `None` when the resolver is not installed yet or cannot
    /// advance; otherwise returns the new resolved ts and records the
    /// min-ts/resolved-ts gap metric.
    pub fn on_min_ts(&mut self, min_ts: TimeStamp) -> Option<TimeStamp> {
        if self.resolver.is_none() {
            debug!("region resolver not ready";
                "region_id" => self.region_id, "min_ts" => min_ts);
            return None;
        }
        debug!("try to advance ts"; "region_id" => self.region_id, "min_ts" => min_ts);
        let resolver = self.resolver.as_mut().unwrap();
        let resolved_ts = match resolver.resolve(min_ts) {
            Some(rts) => rts,
            None => return None,
        };
        debug!("resolved ts updated";
            "region_id" => self.region_id, "resolved_ts" => resolved_ts);
        // Gap between the requested min_ts and what could be resolved, in seconds.
        CDC_RESOLVED_TS_GAP_HISTOGRAM
            .observe((min_ts.physical() - resolved_ts.physical()) as f64 / 1000f64);
        Some(resolved_ts)
    }
    /// Processes a batch of observed raft commands, turning data writes into
    /// events and admin commands into delegate state changes.
    ///
    /// Batches from a previous observation epoch (stale `observe_id`) are
    /// ignored. On the first command error the delegate is marked failed and
    /// the error is returned.
    pub fn on_batch(
        &mut self,
        batch: CmdBatch,
        old_value_cb: Rc<RefCell<OldValueCallback>>,
        old_value_cache: &mut OldValueCache,
    ) -> Result<()> {
        // Stale CmdBatch, drop it silently.
        if batch.observe_id != self.id {
            return Ok(());
        }
        for cmd in batch.into_iter(self.region_id) {
            let Cmd {
                index,
                mut request,
                mut response,
            } = cmd;
            if !response.get_header().has_error() {
                if !request.has_admin_request() {
                    self.sink_data(
                        index,
                        request.requests.into(),
                        old_value_cb.clone(),
                        old_value_cache,
                    )?;
                } else {
                    self.sink_admin(request.take_admin_request(), response.take_admin_response())?;
                }
            } else {
                let err_header = response.mut_header().take_error();
                self.mark_failed();
                return Err(Error::Request(err_header));
            }
        }
        Ok(())
    }
    /// Converts incremental-scan results into event rows and sinks them to
    /// the single downstream that requested the scan.
    ///
    /// Rows are split into multiple batches so that no batch exceeds
    /// `EVENT_MAX_SIZE`. A `None` entry marks the end of the scan and is
    /// translated into an `Initialized` row.
    pub fn on_scan(&mut self, downstream_id: DownstreamID, entries: Vec<Option<TxnEntry>>) {
        let downstreams = if let Some(pending) = self.pending.as_mut() {
            &pending.downstreams
        } else {
            &self.downstreams
        };
        let downstream = if let Some(d) = downstreams.iter().find(|d| d.id == downstream_id) {
            d
        } else {
            warn!("downstream not found"; "downstream_id" => ?downstream_id, "region_id" => self.region_id);
            return;
        };
        let entries_len = entries.len();
        // `rows` is a list of size-bounded batches; rows are appended to the
        // last batch until it would exceed EVENT_MAX_SIZE.
        let mut rows = vec![Vec::with_capacity(entries_len)];
        let mut current_rows_size: usize = 0;
        for entry in entries {
            match entry {
                Some(TxnEntry::Prewrite {
                    default,
                    lock,
                    old_value,
                }) => {
                    let mut row = EventRow::default();
                    let skip = decode_lock(lock.0, &lock.1, &mut row);
                    if skip {
                        continue;
                    }
                    decode_default(default.1, &mut row);
                    let row_size = row.key.len() + row.value.len();
                    if current_rows_size + row_size >= EVENT_MAX_SIZE {
                        rows.push(Vec::with_capacity(entries_len));
                        current_rows_size = 0;
                    }
                    current_rows_size += row_size;
                    row.old_value = old_value.unwrap_or_default();
                    rows.last_mut().unwrap().push(row);
                }
                Some(TxnEntry::Commit {
                    default,
                    write,
                    old_value,
                }) => {
                    let mut row = EventRow::default();
                    let skip = decode_write(write.0, &write.1, &mut row);
                    if skip {
                        continue;
                    }
                    decode_default(default.1, &mut row);
                    // This type means the row is self-contained, it has,
                    //   1. start_ts
                    //   2. commit_ts
                    //   3. key
                    //   4. value
                    if row.get_type() == EventLogType::Rollback {
                        // We dont need to send rollbacks to downstream,
                        // because downstream does not needs rollback to clean
                        // prewrite as it drops all previous stashed data.
                        continue;
                    }
                    set_event_row_type(&mut row, EventLogType::Committed);
                    row.old_value = old_value.unwrap_or_default();
                    let row_size = row.key.len() + row.value.len();
                    if current_rows_size + row_size >= EVENT_MAX_SIZE {
                        rows.push(Vec::with_capacity(entries_len));
                        current_rows_size = 0;
                    }
                    current_rows_size += row_size;
                    rows.last_mut().unwrap().push(row);
                }
                None => {
                    let mut row = EventRow::default();
                    // This type means scan has finished.
                    set_event_row_type(&mut row, EventLogType::Initialized);
                    rows.last_mut().unwrap().push(row);
                }
            }
        }
        // Sink each non-empty batch as one Entries event.
        for rs in rows {
            if !rs.is_empty() {
                let mut event_entries = EventEntries::default();
                event_entries.entries = rs.into();
                let mut event = Event::default();
                event.region_id = self.region_id;
                event.event = Some(Event_oneof_event::Entries(event_entries));
                downstream.sink_event(event);
            }
        }
    }
    /// Converts raft `Put` requests at raft log `index` into change events and
    /// broadcasts them to all downstreams. Also keeps the region's resolver
    /// (or the pending lock list, before the region is ready) in sync so that
    /// resolved-ts can advance.
    fn sink_data(
        &mut self,
        index: u64,
        requests: Vec<Request>,
        old_value_cb: Rc<RefCell<OldValueCallback>>,
        old_value_cache: &mut OldValueCache,
    ) -> Result<()> {
        // Rows are keyed by raw key so write/lock/default CF entries for the
        // same key are merged into a single event row.
        let mut rows = HashMap::default();
        for mut req in requests {
            // CDC cares about put requests only.
            if req.get_cmd_type() != CmdType::Put {
                // Do not log delete requests because they are issued by GC
                // frequently.
                if req.get_cmd_type() != CmdType::Delete {
                    debug!(
                        "skip other command";
                        "region_id" => self.region_id,
                        "command" => ?req,
                    );
                }
                continue;
            }
            let mut put = req.take_put();
            match put.cf.as_str() {
                "write" => {
                    let mut row = EventRow::default();
                    let skip = decode_write(put.take_key(), put.get_value(), &mut row);
                    if skip {
                        continue;
                    }
                    // In order to advance resolved ts,
                    // we must untrack inflight txns if they are committed.
                    let commit_ts = if row.commit_ts == 0 {
                        None
                    } else {
                        Some(row.commit_ts)
                    };
                    match self.resolver {
                        Some(ref mut resolver) => resolver.untrack_lock(
                            row.start_ts.into(),
                            commit_ts.map(Into::into),
                            row.key.clone(),
                        ),
                        None => {
                            // Region not ready yet: queue the untrack so it can
                            // be replayed once the resolver is installed.
                            assert!(self.pending.is_some(), "region resolver not ready");
                            let pending = self.pending.as_mut().unwrap();
                            pending.locks.push(PendingLock::Untrack {
                                key: row.key.clone(),
                                start_ts: row.start_ts.into(),
                                commit_ts: commit_ts.map(Into::into),
                            });
                            pending.pending_bytes += row.key.len();
                            CDC_PENDING_BYTES_GAUGE.add(row.key.len() as i64);
                        }
                    }
                    // A write-CF record must be the first record seen for its key
                    // within one batch.
                    let r = rows.insert(row.key.clone(), row);
                    assert!(r.is_none());
                }
                "lock" => {
                    let mut row = EventRow::default();
                    let skip = decode_lock(put.take_key(), put.get_value(), &mut row);
                    if skip {
                        continue;
                    }
                    if self.txn_extra_op == TxnExtraOp::ReadOldValue {
                        let key = Key::from_raw(&row.key).append_ts(row.start_ts.into());
                        let start = Instant::now();
                        let mut statistics = Statistics::default();
                        row.old_value =
                            old_value_cb.borrow_mut()(key, old_value_cache, &mut statistics)
                                .unwrap_or_default();
                        CDC_OLD_VALUE_DURATION_HISTOGRAM
                            .with_label_values(&["all"])
                            .observe(start.elapsed().as_secs_f64());
                        for (cf, cf_details) in statistics.details().iter() {
                            for (tag, count) in cf_details.iter() {
                                CDC_OLD_VALUE_SCAN_DETAILS
                                    .with_label_values(&[*cf, *tag])
                                    .inc_by(*count as i64);
                            }
                        }
                    }
                    // If a default-CF entry for this key was seen first, keep its
                    // value and merge it into the lock row.
                    let occupied = rows.entry(row.key.clone()).or_default();
                    if !occupied.value.is_empty() {
                        assert!(row.value.is_empty());
                        let mut value = vec![];
                        mem::swap(&mut occupied.value, &mut value);
                        row.value = value;
                    }
                    // In order to compute resolved ts,
                    // we must track inflight txns.
                    match self.resolver {
                        Some(ref mut resolver) => {
                            resolver.track_lock(row.start_ts.into(), row.key.clone())
                        }
                        None => {
                            assert!(self.pending.is_some(), "region resolver not ready");
                            let pending = self.pending.as_mut().unwrap();
                            pending.locks.push(PendingLock::Track {
                                key: row.key.clone(),
                                start_ts: row.start_ts.into(),
                            });
                            pending.pending_bytes += row.key.len();
                            CDC_PENDING_BYTES_GAUGE.add(row.key.len() as i64);
                        }
                    }
                    *occupied = row;
                }
                "" | "default" => {
                    // Default CF carries the (possibly large) value; strip the ts
                    // suffix so the key matches the write/lock rows.
                    let key = Key::from_encoded(put.take_key()).truncate_ts().unwrap();
                    let row = rows.entry(key.into_raw().unwrap()).or_default();
                    decode_default(put.take_value(), row);
                }
                other => {
                    panic!("invalid cf {}", other);
                }
            }
        }
        let mut entries = Vec::with_capacity(rows.len());
        for (_, v) in rows {
            entries.push(v);
        }
        let mut event_entries = EventEntries::default();
        event_entries.entries = entries.into();
        let mut change_data_event = Event::default();
        change_data_event.region_id = self.region_id;
        change_data_event.index = index;
        change_data_event.event = Some(Event_oneof_event::Entries(event_entries));
        self.broadcast(change_data_event, true);
        Ok(())
    }
fn sink_admin(&mut self, request: AdminRequest, mut response: AdminResponse) -> Result<()> {
let store_err = match request.get_cmd_type() {
AdminCmdType::Split => RaftStoreError::EpochNotMatch(
"split".to_owned(),
vec![
response.mut_split().take_left(),
response.mut_split().take_right(),
],
),
AdminCmdType::BatchSplit => RaftStoreError::EpochNotMatch(
"batchsplit".to_owned(),
response.mut_splits().take_regions().into(),
),
AdminCmdType::PrepareMerge
| AdminCmdType::CommitMerge
| AdminCmdType::RollbackMerge => {
RaftStoreError::EpochNotMatch("merge".to_owned(), vec![])
}
_ => return Ok(()),
};
self.mark_failed();
Err(Error::Request(store_err.into()))
}
}
/// Sets the log type of an event row, papering over the fact that the two
/// protobuf codecs generate differently named fields for `type`
/// (`r#type` under prost, `r_type` under rust-protobuf).
fn set_event_row_type(row: &mut EventRow, ty: EventLogType) {
    #[cfg(feature = "prost-codec")]
    {
        row.r#type = ty.into();
    }
    #[cfg(not(feature = "prost-codec"))]
    {
        row.r_type = ty;
    }
}
/// Decodes a write-CF record into `row`. Returns `true` if the record should
/// be skipped (e.g. `WriteType::Lock`), `false` otherwise.
///
/// The key is expected to be a timestamped encoded key; its commit ts suffix
/// is decoded before the key is truncated to the raw user key.
fn decode_write(key: Vec<u8>, value: &[u8], row: &mut EventRow) -> bool {
    let write = WriteRef::parse(value).unwrap().to_owned();
    let (op_type, r_type) = match write.write_type {
        WriteType::Put => (EventRowOpType::Put, EventLogType::Commit),
        WriteType::Delete => (EventRowOpType::Delete, EventLogType::Commit),
        WriteType::Rollback => (EventRowOpType::Unknown, EventLogType::Rollback),
        other => {
            debug!("skip write record"; "write" => ?other, "key" => hex::encode_upper(key));
            return true;
        }
    };
    let key = Key::from_encoded(key);
    // Rollbacks carry no commit ts; for commits, take it from the key suffix.
    let commit_ts = if write.write_type == WriteType::Rollback {
        0
    } else {
        key.decode_ts().unwrap().into_inner()
    };
    row.start_ts = write.start_ts.into_inner();
    row.commit_ts = commit_ts;
    row.key = key.truncate_ts().unwrap().into_raw().unwrap();
    row.op_type = op_type.into();
    set_event_row_type(row, r_type);
    // Short values are inlined in the write record; otherwise the value comes
    // from the default CF via decode_default.
    if let Some(value) = write.short_value {
        row.value = value;
    }
    false
}
/// Decodes a lock-CF record (a prewrite) into `row`. Returns `true` if the
/// lock should be skipped (e.g. pessimistic locks), `false` otherwise.
fn decode_lock(key: Vec<u8>, value: &[u8], row: &mut EventRow) -> bool {
    let lock = Lock::parse(value).unwrap();
    let op_type = match lock.lock_type {
        LockType::Put => EventRowOpType::Put,
        LockType::Delete => EventRowOpType::Delete,
        other => {
            debug!("skip lock record";
                "type" => ?other,
                "start_ts" => ?lock.ts,
                "key" => hex::encode_upper(key),
                "for_update_ts" => ?lock.for_update_ts);
            return true;
        }
    };
    let key = Key::from_encoded(key);
    row.start_ts = lock.ts.into_inner();
    row.key = key.into_raw().unwrap();
    row.op_type = op_type.into();
    set_event_row_type(row, EventLogType::Prewrite);
    // Short values are inlined in the lock; large values live in the default CF.
    if let Some(value) = lock.short_value {
        row.value = value;
    }
    false
}
/// Fills `row.value` from a default-CF record. Empty values are ignored so an
/// already-populated short value is not clobbered.
fn decode_default(value: Vec<u8>, row: &mut EventRow) {
    if !value.is_empty() {
        // `value` is already owned; move it instead of re-allocating with
        // `to_vec()` (which cloned the buffer needlessly).
        row.value = value;
    }
}
// Unit tests covering error propagation (`test_error`) and incremental scan
// sinking (`test_scan`) for the CDC delegate.
#[cfg(test)]
mod tests {
    use super::*;
    use futures::executor::block_on;
    use futures::stream::StreamExt;
    use kvproto::errorpb::Error as ErrorHeader;
    use kvproto::metapb::Region;
    use std::cell::Cell;
    use tikv::storage::mvcc::test_util::*;
    use tikv_util::mpsc::batch::{self, BatchReceiver, VecCollector};
    // Verifies that raftstore/admin errors are forwarded to downstreams as
    // Error events and that any error disables the delegate.
    #[test]
    fn test_error() {
        let region_id = 1;
        let mut region = Region::default();
        region.set_id(region_id);
        region.mut_peers().push(Default::default());
        region.mut_region_epoch().set_version(2);
        region.mut_region_epoch().set_conf_ver(2);
        let region_epoch = region.get_region_epoch().clone();
        let (sink, rx) = batch::unbounded(1);
        let rx = BatchReceiver::new(rx, 1, Vec::new, VecCollector);
        let request_id = 123;
        let mut downstream =
            Downstream::new(String::new(), region_epoch, request_id, ConnID::new());
        downstream.set_sink(sink);
        let mut delegate = Delegate::new(region_id);
        delegate.subscribe(downstream);
        let enabled = delegate.enabled();
        assert!(enabled.load(Ordering::SeqCst));
        let mut resolver = Resolver::new(region_id);
        resolver.init();
        for downstream in delegate.on_region_ready(resolver, region) {
            delegate.subscribe(downstream);
        }
        // Helper that pops exactly one batch from the sink and unwraps the
        // Error event it must contain.
        let rx_wrap = Cell::new(Some(rx));
        let receive_error = || {
            let (resps, rx) = block_on(rx_wrap.replace(None).unwrap().into_future());
            rx_wrap.set(Some(rx));
            let mut resps = resps.unwrap();
            assert_eq!(resps.len(), 1);
            for r in &resps {
                if let CdcEvent::Event(e) = r {
                    assert_eq!(e.get_request_id(), request_id);
                }
            }
            let cdc_event = &mut resps[0];
            if let CdcEvent::Event(e) = cdc_event {
                let event = e.event.take().unwrap();
                match event {
                    Event_oneof_event::Error(err) => err,
                    other => panic!("unknown event {:?}", other),
                }
            } else {
                panic!("unknown event")
            }
        };
        let mut err_header = ErrorHeader::default();
        err_header.set_not_leader(Default::default());
        delegate.stop(Error::Request(err_header));
        let err = receive_error();
        assert!(err.has_not_leader());
        // Enable is disabled by any error.
        assert!(!enabled.load(Ordering::SeqCst));
        let mut err_header = ErrorHeader::default();
        err_header.set_region_not_found(Default::default());
        delegate.stop(Error::Request(err_header));
        let err = receive_error();
        assert!(err.has_region_not_found());
        let mut err_header = ErrorHeader::default();
        err_header.set_epoch_not_match(Default::default());
        delegate.stop(Error::Request(err_header));
        let err = receive_error();
        assert!(err.has_epoch_not_match());
        // Split
        let mut region = Region::default();
        region.set_id(1);
        let mut request = AdminRequest::default();
        request.set_cmd_type(AdminCmdType::Split);
        let mut response = AdminResponse::default();
        response.mut_split().set_left(region.clone());
        let err = delegate.sink_admin(request, response).err().unwrap();
        delegate.stop(err);
        let mut err = receive_error();
        assert!(err.has_epoch_not_match());
        err.take_epoch_not_match()
            .current_regions
            .into_iter()
            .find(|r| r.get_id() == 1)
            .unwrap();
        let mut request = AdminRequest::default();
        request.set_cmd_type(AdminCmdType::BatchSplit);
        let mut response = AdminResponse::default();
        response.mut_splits().set_regions(vec![region].into());
        let err = delegate.sink_admin(request, response).err().unwrap();
        delegate.stop(err);
        let mut err = receive_error();
        assert!(err.has_epoch_not_match());
        err.take_epoch_not_match()
            .current_regions
            .into_iter()
            .find(|r| r.get_id() == 1)
            .unwrap();
        // Merge
        let mut request = AdminRequest::default();
        request.set_cmd_type(AdminCmdType::PrepareMerge);
        let response = AdminResponse::default();
        let err = delegate.sink_admin(request, response).err().unwrap();
        delegate.stop(err);
        let mut err = receive_error();
        assert!(err.has_epoch_not_match());
        assert!(err.take_epoch_not_match().current_regions.is_empty());
        let mut request = AdminRequest::default();
        request.set_cmd_type(AdminCmdType::CommitMerge);
        let response = AdminResponse::default();
        let err = delegate.sink_admin(request, response).err().unwrap();
        delegate.stop(err);
        let mut err = receive_error();
        assert!(err.has_epoch_not_match());
        assert!(err.take_epoch_not_match().current_regions.is_empty());
        let mut request = AdminRequest::default();
        request.set_cmd_type(AdminCmdType::RollbackMerge);
        let response = AdminResponse::default();
        let err = delegate.sink_admin(request, response).err().unwrap();
        delegate.stop(err);
        let mut err = receive_error();
        assert!(err.has_epoch_not_match());
        assert!(err.take_epoch_not_match().current_regions.is_empty());
    }
    // Verifies that scanned prewrite/commit/rollback entries are converted to
    // the expected event rows and flushed to the downstream.
    #[test]
    fn test_scan() {
        let region_id = 1;
        let mut region = Region::default();
        region.set_id(region_id);
        region.mut_peers().push(Default::default());
        region.mut_region_epoch().set_version(2);
        region.mut_region_epoch().set_conf_ver(2);
        let region_epoch = region.get_region_epoch().clone();
        let (sink, rx) = batch::unbounded(1);
        let rx = BatchReceiver::new(rx, 1, Vec::new, VecCollector);
        let request_id = 123;
        let mut downstream =
            Downstream::new(String::new(), region_epoch, request_id, ConnID::new());
        let downstream_id = downstream.get_id();
        downstream.set_sink(sink);
        let mut delegate = Delegate::new(region_id);
        delegate.subscribe(downstream);
        let enabled = delegate.enabled();
        assert!(enabled.load(Ordering::SeqCst));
        // Helper that pops one batch and checks its entries match exactly.
        let rx_wrap = Cell::new(Some(rx));
        let check_event = |event_rows: Vec<EventRow>| {
            let (resps, rx) = block_on(rx_wrap.replace(None).unwrap().into_future());
            rx_wrap.set(Some(rx));
            let mut resps = resps.unwrap();
            assert_eq!(resps.len(), 1);
            for r in &resps {
                if let CdcEvent::Event(e) = r {
                    assert_eq!(e.get_request_id(), request_id);
                }
            }
            let cdc_event = resps.remove(0);
            if let CdcEvent::Event(mut e) = cdc_event {
                assert_eq!(e.region_id, region_id);
                assert_eq!(e.index, 0);
                let event = e.event.take().unwrap();
                match event {
                    Event_oneof_event::Entries(entries) => {
                        assert_eq!(entries.entries.as_slice(), event_rows.as_slice());
                    }
                    other => panic!("unknown event {:?}", other),
                }
            }
        };
        // Stashed in pending before region ready.
        let entries = vec![
            Some(
                EntryBuilder::default()
                    .key(b"a")
                    .value(b"b")
                    .start_ts(1.into())
                    .commit_ts(0.into())
                    .primary(&[])
                    .for_update_ts(0.into())
                    .build_prewrite(LockType::Put, false),
            ),
            Some(
                EntryBuilder::default()
                    .key(b"a")
                    .value(b"b")
                    .start_ts(1.into())
                    .commit_ts(2.into())
                    .primary(&[])
                    .for_update_ts(0.into())
                    .build_commit(WriteType::Put, false),
            ),
            Some(
                EntryBuilder::default()
                    .key(b"a")
                    .value(b"b")
                    .start_ts(3.into())
                    .commit_ts(0.into())
                    .primary(&[])
                    .for_update_ts(0.into())
                    .build_rollback(),
            ),
            None,
        ];
        delegate.on_scan(downstream_id, entries);
        // Flush all pending entries.
        let mut row1 = EventRow::default();
        row1.start_ts = 1;
        row1.commit_ts = 0;
        row1.key = b"a".to_vec();
        row1.op_type = EventRowOpType::Put.into();
        set_event_row_type(&mut row1, EventLogType::Prewrite);
        row1.value = b"b".to_vec();
        let mut row2 = EventRow::default();
        row2.start_ts = 1;
        row2.commit_ts = 2;
        row2.key = b"a".to_vec();
        row2.op_type = EventRowOpType::Put.into();
        set_event_row_type(&mut row2, EventLogType::Committed);
        row2.value = b"b".to_vec();
        // The rollback entry is dropped; the scan-finished marker follows.
        let mut row3 = EventRow::default();
        set_event_row_type(&mut row3, EventLogType::Initialized);
        check_event(vec![row1, row2, row3]);
        let mut resolver = Resolver::new(region_id);
        resolver.init();
        delegate.on_region_ready(resolver, region);
    }
}
| 36.647573 | 108 | 0.528439 |
5db8dd674e37f7f8e60d0563e3b17bc09d0d53b1 | 4,552 | use crate::render_features::render_features_prelude::*;
use crate::render_features::RenderFeatureFlag;
/// The `SubmitNodeBlock` is a collection of `SubmitNode` associated with a particular `RenderFeature`,
/// `RenderView`, and `RenderPhase`. There should be a 1:1 mapping between `SubmitNode`s and draw calls
/// from the `RenderFeature`'s `WriteJob`. The `Renderer` will combine all `SubmitNodeBlock`s sharing the
/// same `RenderView` and `RenderPhase` into a sorted `ViewPhaseSubmitNodeBlock`.
pub struct SubmitNodeBlock<SubmitPacketDataT: SubmitPacketData> {
    // Index of the `RenderFeature` that produced this block.
    feature_index: RenderFeatureIndex,
    // Index of the `RenderPhase` these submit nodes are intended for.
    render_phase: RenderPhaseIndex,
    // Append-only storage for the submit nodes (see `AtomicOnceCellStack`).
    submit_nodes: AtomicOnceCellStack<SubmitNode<SubmitPacketDataT::SubmitNodeData>>,
}
impl<SubmitPacketDataT: 'static + Sync + Send + SubmitPacketData>
    SubmitNodeBlock<SubmitPacketDataT>
{
    /// Number of submit nodes pushed so far.
    pub fn len(&self) -> usize {
        self.submit_nodes.len()
    }

    /// Returns `true` if no submit nodes have been pushed.
    /// (Companion to `len`; see clippy's `len_without_is_empty`.)
    pub fn is_empty(&self) -> bool {
        self.submit_nodes.len() == 0
    }

    /// Creates a `SubmitNodeBlock` with a capacity of `num_submit_nodes` if the `RenderView`
    /// supports the `RenderPhase`, otherwise the capacity will be set to `0`.
    pub fn with_capacity<RenderPhaseT: RenderPhase>(
        view: &RenderView,
        num_submit_nodes: usize,
    ) -> Self {
        Self {
            feature_index: SubmitPacketDataT::RenderFeature::feature_index(),
            render_phase: RenderPhaseT::render_phase_index(),
            submit_nodes: AtomicOnceCellStack::with_capacity(
                if view.phase_is_relevant::<RenderPhaseT>() {
                    num_submit_nodes
                } else {
                    0
                },
            ),
        }
    }

    /// Creates a `SubmitNodeBlock` with a capacity of `num_submit_nodes` if the `RenderView`
    /// supports the `RenderPhase` and `RenderFeatureFlag`, otherwise the capacity will be set to `0`.
    pub fn with_capacity_and_feature_flag<
        RenderPhaseT: RenderPhase,
        RenderFeatureFlagT: RenderFeatureFlag,
    >(
        view: &RenderView,
        num_submit_nodes: usize,
    ) -> Self {
        Self {
            feature_index: SubmitPacketDataT::RenderFeature::feature_index(),
            render_phase: RenderPhaseT::render_phase_index(),
            submit_nodes: AtomicOnceCellStack::with_capacity(
                if view.phase_is_relevant::<RenderPhaseT>()
                    && view.feature_flag_is_relevant::<RenderFeatureFlagT>()
                {
                    num_submit_nodes
                } else {
                    0
                },
            ),
        }
    }

    /// Appends a submit node and returns its id within this block.
    pub fn push_submit_node(
        &self,
        data: SubmitPacketDataT::SubmitNodeData,
        sort_key: SubmitNodeSortKey,
        distance: f32,
    ) -> SubmitNodeId {
        self.submit_nodes.push(SubmitNode {
            sort_key,
            distance,
            data,
        }) as SubmitNodeId
    }

    /// Returns the submit node previously pushed with id `index`.
    pub fn get_submit_node_data(
        &self,
        index: SubmitNodeId,
    ) -> &SubmitNode<SubmitPacketDataT::SubmitNodeData> {
        self.submit_nodes.get(index as usize)
    }

    /// `true` if this block was built for the given `RenderPhase`.
    pub fn is_relevant(
        &self,
        render_phase: RenderPhaseIndex,
    ) -> bool {
        self.render_phase == render_phase
    }
}
// Type-erased view over the block used by the renderer when merging and
// sorting submit nodes across features; methods delegate to the typed impl.
impl<SubmitPacketDataT: 'static + Sync + Send + SubmitPacketData> RenderFeatureSubmitNodeBlock
    for SubmitNodeBlock<SubmitPacketDataT>
{
    fn render_phase(&self) -> RenderPhaseIndex {
        self.render_phase
    }
    fn num_submit_nodes(&self) -> usize {
        self.len()
    }
    // Wraps the stored node's sort key/distance into the type-erased
    // `RenderFeatureSubmitNode` handle used for cross-feature sorting.
    fn get_submit_node(
        &self,
        submit_node_id: SubmitNodeId,
    ) -> RenderFeatureSubmitNode {
        let submit_node = self.get_submit_node_data(submit_node_id);
        RenderFeatureSubmitNode::new(
            self.feature_index,
            submit_node_id,
            submit_node.sort_key,
            submit_node.distance,
        )
    }
    fn feature_index(&self) -> RenderFeatureIndex {
        self.feature_index
    }
}
/// Each `SubmitNode` contains the data needed for the `RenderFeature`'s `RenderFeatureWriteJob` to
/// render a draw call by referencing data in the frame packet, submit packet, render objects set, or
/// some other storage. `SubmitNode`s will be sorted by the `RenderPhase` after they are combined into
/// a `ViewPhaseSubmitNodeBlock`.
pub struct SubmitNode<T> {
    // Phase-defined sort key (e.g. for state batching).
    pub sort_key: SubmitNodeSortKey,
    // Distance from the view, used for depth sorting.
    pub distance: f32,
    // Feature-specific payload consumed by the write job.
    pub data: T,
}
impl<T: Default> SubmitNode<T> {
pub fn new() -> Self {
Self {
sort_key: 0,
distance: 0.,
data: T::default(),
}
}
}
| 32.514286 | 105 | 0.628295 |
690d8273d8b6fe7664532a8d0cef3dc6c0a665a5 | 18,638 | extern crate serde;
extern crate rltk;
use rltk::{Console, GameState, Rltk, Point};
extern crate specs;
use specs::prelude::*;
use specs::saveload::{SimpleMarker, SimpleMarkerAllocator};
#[macro_use]
extern crate specs_derive;
mod components;
pub use components::*;
mod map;
pub use map::*;
mod player;
use player::*;
mod rect;
pub use rect::Rect;
mod visibility_system;
use visibility_system::VisibilitySystem;
mod monster_ai_system;
use monster_ai_system::MonsterAI;
mod map_indexing_system;
use map_indexing_system::MapIndexingSystem;
mod melee_combat_system;
use melee_combat_system::MeleeCombatSystem;
mod damage_system;
use damage_system::DamageSystem;
mod gui;
mod gamelog;
mod spawner;
mod inventory_system;
use inventory_system::{ ItemCollectionSystem, ItemUseSystem, ItemDropSystem, ItemRemoveSystem };
pub mod saveload_system;
pub mod random_table;
pub mod particle_system;
pub mod hunger_system;
pub mod rex_assets;
pub mod trigger_system;
pub mod map_builders;
// Hook up rltk's WASM support so the game also runs in the browser.
rltk::add_wasm_support!();
// When true, map generation is replayed on screen step-by-step before play begins.
const SHOW_MAPGEN_VISUALIZER : bool = true;
/// Top-level state machine driving the game loop; `State::tick` dispatches on
/// the current variant each frame.
#[derive(PartialEq, Copy, Clone)]
pub enum RunState { AwaitingInput,
    PreRun,
    PlayerTurn,
    MonsterTurn,
    ShowInventory,
    ShowDropItem,
    ShowTargeting { range : i32, item : Entity},
    MainMenu { menu_selection : gui::MainMenuSelection },
    SaveGame,
    NextLevel,
    ShowRemoveItem,
    GameOver,
    // Reveals the map one row at a time (magic mapping scroll effect).
    MagicMapReveal { row : i32 },
    // Replays the map-builder snapshots before handing off to `mapgen_next_state`.
    MapGeneration
}
/// Global game state: the ECS world plus bookkeeping for the map-generation
/// visualizer (snapshot history, replay cursor and frame timer).
pub struct State {
    pub ecs: World,
    // State to enter once the map-generation replay finishes.
    mapgen_next_state : Option<RunState>,
    // Map snapshots recorded by the builder, replayed one per ~200ms.
    mapgen_history : Vec<Map>,
    mapgen_index : usize,
    mapgen_timer : f32
}
impl State {
    /// Runs every game system once, in dependency order (visibility before AI,
    /// map indexing before combat, combat before damage, item systems, hunger,
    /// then particles), and finally applies deferred ECS changes.
    fn run_systems(&mut self) {
        let mut vis = VisibilitySystem{};
        vis.run_now(&self.ecs);
        let mut mob = MonsterAI{};
        mob.run_now(&self.ecs);
        let mut mapindex = MapIndexingSystem{};
        mapindex.run_now(&self.ecs);
        let mut triggers = trigger_system::TriggerSystem{};
        triggers.run_now(&self.ecs);
        let mut melee = MeleeCombatSystem{};
        melee.run_now(&self.ecs);
        let mut damage = DamageSystem{};
        damage.run_now(&self.ecs);
        let mut pickup = ItemCollectionSystem{};
        pickup.run_now(&self.ecs);
        let mut itemuse = ItemUseSystem{};
        itemuse.run_now(&self.ecs);
        let mut drop_items = ItemDropSystem{};
        drop_items.run_now(&self.ecs);
        let mut item_remove = ItemRemoveSystem{};
        item_remove.run_now(&self.ecs);
        let mut hunger = hunger_system::HungerSystem{};
        hunger.run_now(&self.ecs);
        let mut particles = particle_system::ParticleSpawnSystem{};
        particles.run_now(&self.ecs);
        // Apply lazy insertions/deletions queued by the systems above.
        self.ecs.maintain();
    }
}
impl GameState for State {
    /// Per-frame entry point: renders the current scene, then advances the
    /// `RunState` machine, writes the new state back as a resource and culls
    /// dead entities.
    fn tick(&mut self, ctx : &mut Rltk) {
        // Copy the state out so we can match on it without holding the borrow.
        let mut newrunstate;
        {
            let runstate = self.ecs.fetch::<RunState>();
            newrunstate = *runstate;
        }
        ctx.cls();
        particle_system::cull_dead_particles(&mut self.ecs, ctx);
        // Draw the map, entities and UI for every in-game state (menus and the
        // game-over screen draw themselves).
        match newrunstate {
            RunState::MainMenu{..} => {}
            RunState::GameOver{..} => {}
            _ => {
                draw_map(&self.ecs.fetch::<Map>(), ctx);
                let positions = self.ecs.read_storage::<Position>();
                let renderables = self.ecs.read_storage::<Renderable>();
                let hidden = self.ecs.read_storage::<Hidden>();
                let map = self.ecs.fetch::<Map>();
                // Sort by render_order so lower-priority glyphs are drawn first.
                let mut data = (&positions, &renderables, !&hidden).join().collect::<Vec<_>>();
                data.sort_by(|&a, &b| b.1.render_order.cmp(&a.1.render_order) );
                for (pos, render, _hidden) in data.iter() {
                    let idx = map.xy_idx(pos.x, pos.y);
                    if map.visible_tiles[idx] { ctx.set(pos.x, pos.y, render.fg, render.bg, render.glyph) }
                }
                gui::draw_ui(&self.ecs, ctx);
            }
        }
        match newrunstate {
            RunState::MapGeneration => {
                // With the visualizer off, skip straight to the queued state
                // (the replay drawing below still runs this one frame).
                if !SHOW_MAPGEN_VISUALIZER {
                    newrunstate = self.mapgen_next_state.unwrap();
                }
                ctx.cls();
                draw_map(&self.mapgen_history[self.mapgen_index], ctx);
                self.mapgen_timer += ctx.frame_time_ms;
                if self.mapgen_timer > 200.0 {
                    self.mapgen_timer = 0.0;
                    self.mapgen_index += 1;
                    if self.mapgen_index >= self.mapgen_history.len() {
                        //self.mapgen_index -= 1;
                        newrunstate = self.mapgen_next_state.unwrap();
                    }
                }
            }
            RunState::PreRun => {
                self.run_systems();
                self.ecs.maintain();
                newrunstate = RunState::AwaitingInput;
            }
            RunState::AwaitingInput => {
                newrunstate = player_input(self, ctx);
            }
            RunState::PlayerTurn => {
                self.run_systems();
                self.ecs.maintain();
                // A magic mapping scroll overrides the normal turn order.
                match *self.ecs.fetch::<RunState>() {
                    RunState::MagicMapReveal{ .. } => newrunstate = RunState::MagicMapReveal{ row: 0 },
                    _ => newrunstate = RunState::MonsterTurn
                }
            }
            RunState::MonsterTurn => {
                self.run_systems();
                self.ecs.maintain();
                newrunstate = RunState::AwaitingInput;
            }
            RunState::ShowInventory => {
                let result = gui::show_inventory(self, ctx);
                match result.0 {
                    gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput,
                    gui::ItemMenuResult::NoResponse => {}
                    gui::ItemMenuResult::Selected => {
                        let item_entity = result.1.unwrap();
                        // Ranged items need a target first; others are used at once.
                        let is_ranged = self.ecs.read_storage::<Ranged>();
                        let is_item_ranged = is_ranged.get(item_entity);
                        if let Some(is_item_ranged) = is_item_ranged {
                            newrunstate = RunState::ShowTargeting{ range: is_item_ranged.range, item: item_entity };
                        } else {
                            let mut intent = self.ecs.write_storage::<WantsToUseItem>();
                            intent.insert(*self.ecs.fetch::<Entity>(), WantsToUseItem{ item: item_entity, target: None }).expect("Unable to insert intent");
                            newrunstate = RunState::PlayerTurn;
                        }
                    }
                }
            }
            RunState::ShowDropItem => {
                let result = gui::drop_item_menu(self, ctx);
                match result.0 {
                    gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput,
                    gui::ItemMenuResult::NoResponse => {}
                    gui::ItemMenuResult::Selected => {
                        let item_entity = result.1.unwrap();
                        let mut intent = self.ecs.write_storage::<WantsToDropItem>();
                        intent.insert(*self.ecs.fetch::<Entity>(), WantsToDropItem{ item: item_entity }).expect("Unable to insert intent");
                        newrunstate = RunState::PlayerTurn;
                    }
                }
            }
            RunState::ShowRemoveItem => {
                let result = gui::remove_item_menu(self, ctx);
                match result.0 {
                    gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput,
                    gui::ItemMenuResult::NoResponse => {}
                    gui::ItemMenuResult::Selected => {
                        let item_entity = result.1.unwrap();
                        let mut intent = self.ecs.write_storage::<WantsToRemoveItem>();
                        intent.insert(*self.ecs.fetch::<Entity>(), WantsToRemoveItem{ item: item_entity }).expect("Unable to insert intent");
                        newrunstate = RunState::PlayerTurn;
                    }
                }
            }
            RunState::ShowTargeting{range, item} => {
                let result = gui::ranged_target(self, ctx, range);
                match result.0 {
                    gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput,
                    gui::ItemMenuResult::NoResponse => {}
                    gui::ItemMenuResult::Selected => {
                        let mut intent = self.ecs.write_storage::<WantsToUseItem>();
                        intent.insert(*self.ecs.fetch::<Entity>(), WantsToUseItem{ item, target: result.1 }).expect("Unable to insert intent");
                        newrunstate = RunState::PlayerTurn;
                    }
                }
            }
            RunState::MainMenu{ .. } => {
                let result = gui::main_menu(self, ctx);
                match result {
                    gui::MainMenuResult::NoSelection{ selected } => newrunstate = RunState::MainMenu{ menu_selection: selected },
                    gui::MainMenuResult::Selected{ selected } => {
                        match selected {
                            gui::MainMenuSelection::NewGame => newrunstate = RunState::PreRun,
                            gui::MainMenuSelection::LoadGame => {
                                saveload_system::load_game(&mut self.ecs);
                                newrunstate = RunState::AwaitingInput;
                                // Permadeath-style: the save is consumed on load.
                                saveload_system::delete_save();
                            }
                            gui::MainMenuSelection::Quit => { ::std::process::exit(0); }
                        }
                    }
                }
            }
            RunState::GameOver => {
                let result = gui::game_over(ctx);
                match result {
                    gui::GameOverResult::NoSelection => {}
                    gui::GameOverResult::QuitToMenu => {
                        self.game_over_cleanup();
                        newrunstate = RunState::MapGeneration;
                        self.mapgen_next_state = Some(RunState::MainMenu{ menu_selection: gui::MainMenuSelection::NewGame });
                    }
                }
            }
            RunState::SaveGame => {
                saveload_system::save_game(&mut self.ecs);
                newrunstate = RunState::MainMenu{ menu_selection : gui::MainMenuSelection::LoadGame };
            }
            RunState::NextLevel => {
                self.goto_next_level();
                self.mapgen_next_state = Some(RunState::PreRun);
                newrunstate = RunState::MapGeneration;
            }
            RunState::MagicMapReveal{row} => {
                // Reveal one map row per frame until the whole map is shown.
                let mut map = self.ecs.fetch_mut::<Map>();
                for x in 0..MAPWIDTH {
                    let idx = map.xy_idx(x as i32,row);
                    map.revealed_tiles[idx] = true;
                }
                if row as usize == MAPHEIGHT-1 {
                    newrunstate = RunState::MonsterTurn;
                } else {
                    newrunstate = RunState::MagicMapReveal{ row: row+1 };
                }
            }
        }
        // Publish the new state and reap anything that died this frame.
        {
            let mut runwriter = self.ecs.write_resource::<RunState>();
            *runwriter = newrunstate;
        }
        damage_system::delete_the_dead(&mut self.ecs);
    }
}
impl State {
    /// Collects every entity that should be despawned when the map changes:
    /// everything except the player, items in the player's backpack, and
    /// items the player has equipped.
    fn entities_to_remove_on_level_change(&mut self) -> Vec<Entity> {
        let entities = self.ecs.entities();
        let player = self.ecs.read_storage::<Player>();
        let backpack = self.ecs.read_storage::<InBackpack>();
        let player_entity = self.ecs.fetch::<Entity>();
        let equipped = self.ecs.read_storage::<Equipped>();
        entities
            .join()
            .filter(|&entity| {
                // Keep the player entity itself.
                let is_player = player.get(entity).is_some();
                // Keep items carried in the player's backpack.
                let carried = backpack
                    .get(entity)
                    .map_or(false, |bp| bp.owner == *player_entity);
                // Keep items the player has equipped.
                let worn = equipped
                    .get(entity)
                    .map_or(false, |eq| eq.owner == *player_entity);
                !is_player && !carried && !worn
            })
            .collect()
    }
    /// Moves the game one dungeon level deeper: despawns everything except the
    /// player and their gear, builds the next map, logs a message, and heals
    /// the player up to at least half of their maximum hit points.
    fn goto_next_level(&mut self) {
        // Delete entities that aren't the player or his/her equipment.
        let to_delete = self.entities_to_remove_on_level_change();
        for target in to_delete {
            self.ecs.delete_entity(target).expect("Unable to delete entity");
        }
        // Build a new map one level deeper and place the player.
        let current_depth;
        {
            let worldmap_resource = self.ecs.fetch::<Map>();
            current_depth = worldmap_resource.depth;
        }
        self.generate_world_map(current_depth + 1);
        // Notify the player and give them some health.
        let player_entity = self.ecs.fetch::<Entity>();
        let mut gamelog = self.ecs.fetch_mut::<gamelog::GameLog>();
        gamelog.entries.insert(0, "You descend to the next level, and take a moment to heal.".to_string());
        let mut player_health_store = self.ecs.write_storage::<CombatStats>();
        if let Some(player_health) = player_health_store.get_mut(*player_entity) {
            player_health.hp = i32::max(player_health.hp, player_health.max_hp / 2);
        }
    }
    /// Resets the world after a game over: deletes every entity, spawns a
    /// fresh player, and generates a new depth-1 map.
    fn game_over_cleanup(&mut self) {
        // Delete everything. Collect first: entities can't be deleted while
        // iterating over them.
        let mut to_delete = Vec::new();
        for e in self.ecs.entities().join() {
            to_delete.push(e);
        }
        for del in to_delete.iter() {
            self.ecs.delete_entity(*del).expect("Deletion failed");
        }
        // Spawn a new player and publish it as the player resource.
        {
            let player_entity = spawner::player(&mut self.ecs, 0, 0);
            let mut player_entity_writer = self.ecs.write_resource::<Entity>();
            *player_entity_writer = player_entity;
        }
        // Build a new map and place the player.
        self.generate_world_map(1);
    }
    /// Generates the map for `new_depth`, records the builder's snapshot
    /// history for the visualizer, spawns the map's entities and positions the
    /// player at the builder-chosen start.
    fn generate_world_map(&mut self, new_depth : i32) {
        self.mapgen_index = 0;
        self.mapgen_timer = 0.0;
        self.mapgen_history.clear();
        let mut rng = self.ecs.write_resource::<rltk::RandomNumberGenerator>();
        let mut builder = map_builders::random_builder(new_depth, &mut rng);
        builder.build_map(&mut rng);
        self.mapgen_history = builder.build_data.history.clone();
        let player_start;
        {
            let mut worldmap_resource = self.ecs.write_resource::<Map>();
            *worldmap_resource = builder.build_data.map.clone();
            player_start = builder.build_data.starting_position.as_mut().unwrap().clone();
        }
        // Release the RNG borrow before the builder needs mutable world access.
        std::mem::drop(rng);
        // Spawn bad guys.
        builder.spawn_entities(&mut self.ecs);
        // Place the player and update resources.
        let (player_x, player_y) = (player_start.x, player_start.y);
        let mut player_position = self.ecs.write_resource::<Point>();
        *player_position = Point::new(player_x, player_y);
        let mut position_components = self.ecs.write_storage::<Position>();
        let player_entity = self.ecs.fetch::<Entity>();
        if let Some(player_pos_comp) = position_components.get_mut(*player_entity) {
            player_pos_comp.x = player_x;
            player_pos_comp.y = player_y;
        }
        // Mark the player's visibility as dirty so the viewshed is recomputed.
        let mut viewshed_components = self.ecs.write_storage::<Viewshed>();
        if let Some(vs) = viewshed_components.get_mut(*player_entity) {
            vs.dirty = true;
        }
    }
}
/// Program entry point: builds the rltk terminal, registers every component
/// type and shared resource with the ECS, generates the first map, and hands
/// control to the rltk main loop.
fn main() {
    let mut context = Rltk::init_simple8x8(80, 50, "Hello Rust World", "resources");
    context.with_post_scanlines(true);
    let mut gs = State {
        ecs: World::new(),
        // Start at the main menu once the mapgen replay finishes.
        mapgen_next_state : Some(RunState::MainMenu{ menu_selection: gui::MainMenuSelection::NewGame }),
        mapgen_index : 0,
        mapgen_history: Vec::new(),
        mapgen_timer: 0.0
    };
    // Every component type must be registered before it can be stored.
    gs.ecs.register::<Position>();
    gs.ecs.register::<Renderable>();
    gs.ecs.register::<Player>();
    gs.ecs.register::<Viewshed>();
    gs.ecs.register::<Monster>();
    gs.ecs.register::<Name>();
    gs.ecs.register::<BlocksTile>();
    gs.ecs.register::<CombatStats>();
    gs.ecs.register::<WantsToMelee>();
    gs.ecs.register::<SufferDamage>();
    gs.ecs.register::<Item>();
    gs.ecs.register::<ProvidesHealing>();
    gs.ecs.register::<InflictsDamage>();
    gs.ecs.register::<AreaOfEffect>();
    gs.ecs.register::<Consumable>();
    gs.ecs.register::<Ranged>();
    gs.ecs.register::<InBackpack>();
    gs.ecs.register::<WantsToPickupItem>();
    gs.ecs.register::<WantsToUseItem>();
    gs.ecs.register::<WantsToDropItem>();
    gs.ecs.register::<Confusion>();
    gs.ecs.register::<SimpleMarker<SerializeMe>>();
    gs.ecs.register::<SerializationHelper>();
    gs.ecs.register::<Equippable>();
    gs.ecs.register::<Equipped>();
    gs.ecs.register::<MeleePowerBonus>();
    gs.ecs.register::<DefenseBonus>();
    gs.ecs.register::<WantsToRemoveItem>();
    gs.ecs.register::<ParticleLifetime>();
    gs.ecs.register::<HungerClock>();
    gs.ecs.register::<ProvidesFood>();
    gs.ecs.register::<MagicMapper>();
    gs.ecs.register::<Hidden>();
    gs.ecs.register::<EntryTrigger>();
    gs.ecs.register::<EntityMoved>();
    gs.ecs.register::<SingleActivation>();
    // Shared resources: serialization marker allocator, map, player position,
    // RNG, player entity, run state, game log, particles and menu art.
    gs.ecs.insert(SimpleMarkerAllocator::<SerializeMe>::new());
    gs.ecs.insert(Map::new(1));
    gs.ecs.insert(Point::new(0, 0));
    gs.ecs.insert(rltk::RandomNumberGenerator::new());
    let player_entity = spawner::player(&mut gs.ecs, 0, 0);
    gs.ecs.insert(player_entity);
    gs.ecs.insert(RunState::MapGeneration{} );
    gs.ecs.insert(gamelog::GameLog{ entries : vec!["Welcome to Rusty Roguelike".to_string()] });
    gs.ecs.insert(particle_system::ParticleBuilder::new());
    gs.ecs.insert(rex_assets::RexAssets::new());
    gs.generate_world_map(1);
    rltk::main_loop(context, gs);
}
| 39.073375 | 156 | 0.550542 |
b96be5f0d64397230da7acc4315c4b62165b0101 | 3,031 | // Copyright 2022 pyke.io
// 2019-2021 Tauri Programme within The Commons Conservancy
// [https://tauri.studio/]
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{
cmp::Ordering,
env::current_dir,
ffi::OsStr,
fs::FileType,
path::{Path, PathBuf}
};
use ignore::WalkBuilder;
use once_cell::sync::Lazy;
const MILLENNIUM_GITIGNORE: &[u8] = include_bytes!("../../millennium.gitignore");
/// Walk `dir` and return the first path accepted by `checker`.
///
/// A bundled Millennium `.gitignore` is materialized (once) into the OS temp
/// directory and added as an ignore file so the walk skips standard build
/// artifacts; failure to create it is deliberately ignored (best effort).
/// Search depth is bounded by the `MILLENNIUM_PATH_DEPTH` env var (default 3,
/// panics if set to a non-integer). Entries with a file extension sort before
/// those without, so config files are visited before sibling directories.
fn lookup<F: Fn(&PathBuf, FileType) -> bool>(dir: &Path, checker: F) -> Option<PathBuf> {
	let mut default_gitignore = std::env::temp_dir();
	default_gitignore.push(".gitignore");
	if !default_gitignore.exists() {
		// Best effort: if the file cannot be created or written, walk without it.
		if let Ok(mut file) = std::fs::File::create(default_gitignore.clone()) {
			use std::io::Write;
			let _ = file.write_all(MILLENNIUM_GITIGNORE);
		}
	}
	let mut builder = WalkBuilder::new(dir);
	let _ = builder.add_ignore(default_gitignore);
	builder
		.require_git(false)
		.ignore(false)
		.max_depth(Some(
			std::env::var("MILLENNIUM_PATH_DEPTH")
				.map(|d| {
					d.parse()
						.expect("`MILLENNIUM_PATH_DEPTH` environment variable must be a positive integer")
				})
				.unwrap_or(3)
		))
		.sort_by_file_path(|a, _| if a.extension().is_some() { Ordering::Less } else { Ordering::Greater });
	for entry in builder.build().flatten() {
		// NOTE(review): entry.path() is already rooted at `dir` for walked
		// entries, so this join may duplicate the prefix when `dir` is a
		// relative path — confirm intended behavior.
		let path = dir.join(entry.path());
		if checker(&path, entry.file_type().unwrap()) {
			return Some(path);
		}
	}
	None
}
fn get_millennium_dir() -> PathBuf {
lookup(¤t_dir().expect("failed to read cwd"), |path, file_type| {
if file_type.is_dir() {
path.join(".millenniumrc").exists() || path.join(".millenniumrc.json").exists()
} else if let Some(file_name) = path.file_name() {
file_name == OsStr::new(".millenniumrc") || file_name == OsStr::new(".millenniumrc.json")
} else {
false
}
})
.map(|p| if p.is_dir() { p } else { p.parent().unwrap().to_path_buf() })
.expect("Couldn't recognize the current folder as a Millennium project. It must contain a `.millenniumrc` or `.millenniumrc.json` file in any subfolder.")
}
fn get_app_dir() -> Option<PathBuf> {
lookup(¤t_dir().expect("failed to read cwd"), |path, _| {
if let Some(file_name) = path.file_name() {
file_name == OsStr::new("package.json")
} else {
false
}
})
.map(|p| p.parent().unwrap().to_path_buf())
}
/// Cached application directory: the `package.json` directory when one is
/// found, otherwise the Millennium project directory. Resolved once per
/// process and memoized in a `Lazy` static.
pub fn app_dir() -> &'static PathBuf {
	static APP_DIR: Lazy<PathBuf> = Lazy::new(|| get_app_dir().unwrap_or_else(get_millennium_dir));
	&APP_DIR
}
/// The Millennium project directory, re-resolved on every call (not cached).
/// Panics when the current folder is not inside a Millennium project.
pub fn millennium_dir() -> PathBuf {
	get_millennium_dir()
}
| 31.247423 | 155 | 0.677334 |
f4255ff9422bfe20c676e5c063e2e4a554e7cc6f | 453 | use crate::enums::DespawnType;
use crate::types::Mob;
/// A mob despawned.
///
/// Sent when a powerup despawns and when a missile despawns without
/// hitting anything. It does not cause an explosion to be shown at
/// the despawn location.
#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct MobDespawn {
    // ID of the mob that despawned.
    pub id: Mob,
    // Reason/kind of despawn; serialized on the wire as "type".
    #[cfg_attr(feature = "serde", serde(rename = "type"))]
    pub ty: DespawnType,
}
| 28.3125 | 62 | 0.704194 |
87cdc3065ac6fc268504a283a192f7a2ab5e3f75 | 5,531 | use std::fmt::{Display, Formatter, Result as FmtResult};
use std::hash::{Hash, Hasher};
use libeir_util_number::{cast, BigInt, Float, NumCast, Number};
use libeir_util_binary::BitVec;
use libeir_intern::Symbol;
use super::float::raw_double_bits;
use super::Integer;
/// Lower an `Integer` onto the matching atomic-term variant: big integers
/// become `BigInt` terms, machine-sized ones become `Int` terms.
impl From<Integer> for AtomicTerm {
    fn from(int: Integer) -> AtomicTerm {
        match int {
            Integer::Big(n) => AtomicTerm::BigInt(BigIntTerm(n)),
            Integer::Small(n) => AtomicTerm::Int(IntTerm(n)),
        }
    }
}
/// Arbitrary-precision integer term (newtype over `BigInt`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BigIntTerm(pub BigInt);
impl BigIntTerm {
    /// Borrow the underlying big integer.
    #[inline]
    pub fn value(&self) -> &BigInt {
        &self.0
    }
}
// Implemented as `From` rather than `Into` (clippy::from_over_into): the
// standard blanket impl still provides `Into<BigInt> for BigIntTerm`, so all
// existing `.into()` call sites keep working.
impl From<BigIntTerm> for BigInt {
    /// Unwrap the term into the owned big integer.
    #[inline]
    fn from(term: BigIntTerm) -> BigInt {
        term.0
    }
}
/// Wrap an existing big-integer term into the atomic-term enum.
impl From<BigIntTerm> for AtomicTerm {
    fn from(data: BigIntTerm) -> Self {
        AtomicTerm::BigInt(data)
    }
}
/// Wrap a raw `BigInt` into the atomic-term enum.
impl From<BigInt> for AtomicTerm {
    fn from(data: BigInt) -> Self {
        AtomicTerm::BigInt(BigIntTerm(data))
    }
}
/// Big-integer terms print as their plain decimal value.
impl Display for BigIntTerm {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        write!(fmt, "{}", self.0)
    }
}
/// Machine-sized integer term (newtype over `i64`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct IntTerm(pub i64);
impl IntTerm {
    /// The raw integer value.
    #[inline]
    pub fn value(&self) -> i64 {
        self.0
    }
}
// Implemented as `From` rather than `Into` (clippy::from_over_into): the
// standard blanket impl still provides `Into<i64> for IntTerm`, so all
// existing `.into()` call sites keep working.
impl From<IntTerm> for i64 {
    /// Unwrap the term into the raw integer.
    #[inline]
    fn from(term: IntTerm) -> i64 {
        term.0
    }
}
/// Wrap an existing small-integer term into the atomic-term enum.
impl From<IntTerm> for AtomicTerm {
    fn from(data: IntTerm) -> Self {
        AtomicTerm::Int(data)
    }
}
/// Small-integer terms print as their plain decimal value.
impl Display for IntTerm {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        write!(fmt, "{}", self.0)
    }
}
/// Convert any castable numeric value into an atomic term.
///
/// Values that fit in an `i64` become `Int` terms; anything larger hits the
/// unfinished big-integer promotion path and panics via `unimplemented!()`.
fn from_num<N>(n: N) -> AtomicTerm
where
    N: NumCast,
{
    match cast(n) {
        Some(small) => AtomicTerm::Int(IntTerm(small)),
        // TODO bigint
        None => unimplemented!(),
    }
}
/// Generate a `From<$typ> for AtomicTerm` impl that funnels through
/// `from_num` (and therefore panics for values that overflow `i64`).
macro_rules! impl_from_num {
    ($typ:ty) => {
        impl From<$typ> for AtomicTerm {
            fn from(data: $typ) -> Self {
                from_num(data)
            }
        }
    };
}
impl_from_num!(usize);
impl_from_num!(i32);
impl_from_num!(i64);
impl_from_num!(u32);
impl_from_num!(u64);
/// Characters convert via their Unicode scalar value.
impl From<char> for AtomicTerm {
    fn from(data: char) -> Self {
        from_num(data as i64)
    }
}
/// Floating-point term (newtype over `Float`).
#[derive(Debug, Clone, PartialEq)]
pub struct FloatTerm(pub Float);
impl FloatTerm {
    /// The raw `f64` value.
    #[inline]
    pub fn value(&self) -> f64 {
        self.0.inner()
    }
}
// `Float` excludes the values that break reflexivity, so total equality holds.
impl Eq for FloatTerm {}
// Hash must agree with Eq; hashing the raw IEEE-754 bit pattern keeps
// equal floats hashing equally, hence the explicit impl next to PartialEq.
#[allow(clippy::derive_hash_xor_eq)]
impl Hash for FloatTerm {
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        raw_double_bits(&self.value()).hash(state)
    }
}
/// Wrap an existing float term into the atomic-term enum.
impl From<FloatTerm> for AtomicTerm {
    fn from(data: FloatTerm) -> Self {
        AtomicTerm::Float(data)
    }
}
/// Convert a raw `f64` into a float term.
/// NOTE(review): panics when `Float::new` rejects the value (presumably
/// NaN/infinity) — confirm against the `Float` type's contract.
impl From<f64> for AtomicTerm {
    fn from(data: f64) -> Self {
        AtomicTerm::Float(FloatTerm(Float::new(data).unwrap()))
    }
}
/// Wrap a checked `Float` into a float term.
impl From<Float> for AtomicTerm {
    fn from(data: Float) -> Self {
        AtomicTerm::Float(FloatTerm(data))
    }
}
/// Dispatch a generic `Number` to the float or integer conversion.
impl From<Number> for AtomicTerm {
    fn from(data: Number) -> Self {
        match data {
            Number::Float(float) => float.into(),
            Number::Integer(int) => int.into(),
        }
    }
}
/// Float terms print with an `f` prefix, e.g. `f1.5`.
impl Display for FloatTerm {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        write!(fmt, "f{}", self.0)
    }
}
/// Atom term (newtype over an interned `Symbol`).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct AtomTerm(pub Symbol);
/// Wrap an existing atom term into the atomic-term enum.
impl From<AtomTerm> for AtomicTerm {
    fn from(data: AtomTerm) -> Self {
        AtomicTerm::Atom(data)
    }
}
/// Intern-symbol shortcut for building atom terms.
impl From<Symbol> for AtomicTerm {
    fn from(data: Symbol) -> Self {
        AtomTerm(data).into()
    }
}
/// Booleans map onto the conventional `true`/`false` atoms.
impl From<bool> for AtomicTerm {
    fn from(data: bool) -> Self {
        let sym = if data {
            Symbol::intern("true")
        } else {
            Symbol::intern("false")
        };
        AtomTerm(sym).into()
    }
}
/// Allow comparing an atom directly against its string spelling.
impl PartialEq<str> for AtomTerm {
    fn eq(&self, other: &str) -> bool {
        self.0 == other
    }
}
/// Atoms print quoted with an `a` prefix, e.g. `a'ok'`.
impl Display for AtomTerm {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        write!(fmt, "a'{}'", self.0) // TODO escape
    }
}
/// Bitstring/binary term (newtype over `BitVec`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BinaryTerm(pub BitVec);
impl BinaryTerm {
    /// Borrow the underlying bit vector.
    #[inline]
    pub fn value(&self) -> &BitVec {
        &self.0
    }
}
/// Wrap an existing binary term into the atomic-term enum.
impl From<BinaryTerm> for AtomicTerm {
    fn from(data: BinaryTerm) -> Self {
        AtomicTerm::Binary(data)
    }
}
/// Byte-vector shortcut for building binary terms.
impl From<Vec<u8>> for AtomicTerm {
    fn from(data: Vec<u8>) -> Self {
        AtomicTerm::Binary(BinaryTerm(data.into()))
    }
}
/// Bit-vector shortcut for building binary terms.
impl From<BitVec> for AtomicTerm {
    fn from(data: BitVec) -> Self {
        AtomicTerm::Binary(BinaryTerm(data))
    }
}
/// The empty list (`[]`) term; carries no data.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct NilTerm;
impl From<NilTerm> for AtomicTerm {
    fn from(_data: NilTerm) -> Self {
        AtomicTerm::Nil
    }
}
/// The sum of all atomic (non-compound) term kinds: integers (small and
/// big), floats, atoms, binaries, and the empty list.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AtomicTerm {
    Int(IntTerm),
    BigInt(BigIntTerm),
    Float(FloatTerm),
    Atom(AtomTerm),
    Binary(BinaryTerm),
    Nil,
}
/// Delegate printing to the inner term's `Display`; nil prints as `[]` and
/// binaries as the placeholder `bin`.
impl Display for AtomicTerm {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        match self {
            AtomicTerm::Int(inner) => Display::fmt(inner, fmt),
            AtomicTerm::BigInt(inner) => Display::fmt(inner, fmt),
            AtomicTerm::Float(inner) => Display::fmt(inner, fmt),
            AtomicTerm::Atom(inner) => Display::fmt(inner, fmt),
            AtomicTerm::Nil => fmt.write_str("[]"),
            AtomicTerm::Binary(_inner) => fmt.write_str("bin"),
        }
    }
}
| 22.761317 | 65 | 0.575484 |
d54e087f496feb21bfd2f34b731681407cb065f4 | 1,938 | use engine_test_support::low_level::{
DeployItemBuilder, ExecuteRequestBuilder, InMemoryWasmTestBuilder, DEFAULT_ACCOUNT_ADDR,
DEFAULT_GENESIS_CONFIG, DEFAULT_PAYMENT,
};
use types::{account::PublicKey, U512};
const CONTRACT_TRANSFER_PURSE_TO_ACCOUNT: &str = "transfer_purse_to_account.wasm";
const ACCOUNT_1_ADDR: [u8; 32] = [1u8; 32];
/// End-to-end: refund-purse payment flow driven by the default (genesis)
/// account.
#[ignore]
#[test]
fn should_run_pos_refund_purse_contract_default_account() {
    let mut builder = initialize();
    refund_tests(&mut builder, DEFAULT_ACCOUNT_ADDR);
}
/// End-to-end: refund-purse payment flow driven by a secondary account,
/// which is funded first so it can pay for its own deploy.
#[ignore]
#[test]
fn should_run_pos_refund_purse_contract_account_1() {
    let mut builder = initialize();
    transfer(&mut builder, ACCOUNT_1_ADDR, *DEFAULT_PAYMENT * 2);
    refund_tests(&mut builder, ACCOUNT_1_ADDR);
}
/// Build a fresh in-memory test builder with genesis already applied.
fn initialize() -> InMemoryWasmTestBuilder {
    let mut test_builder = InMemoryWasmTestBuilder::default();
    test_builder.run_genesis(&DEFAULT_GENESIS_CONFIG);
    test_builder
}
/// Transfer `amount` from the default account to `address` using the
/// transfer-purse-to-account session contract, asserting the deploy succeeds.
fn transfer(builder: &mut InMemoryWasmTestBuilder, address: [u8; 32], amount: U512) {
    let target_key = PublicKey::new(address);
    let exec_request = ExecuteRequestBuilder::standard(
        DEFAULT_ACCOUNT_ADDR,
        CONTRACT_TRANSFER_PURSE_TO_ACCOUNT,
        (target_key, amount),
    )
    .build();
    builder.exec(exec_request).expect_success().commit();
}
/// Execute a do-nothing session paid for through the `pos_refund_purse`
/// payment contract from `address`, asserting the deploy succeeds.
fn refund_tests(builder: &mut InMemoryWasmTestBuilder, address: [u8; 32]) {
    let signer = PublicKey::new(address);
    let deploy = DeployItemBuilder::new()
        .with_address(address)
        .with_deploy_hash([2; 32])
        .with_session_code("do_nothing.wasm", ())
        .with_payment_code("pos_refund_purse.wasm", (*DEFAULT_PAYMENT,))
        .with_authorization_keys(&[signer])
        .build();
    let exec_request = ExecuteRequestBuilder::new().push_deploy(deploy).build();
    builder.exec(exec_request).expect_success().commit();
}
| 30.28125 | 92 | 0.688854 |
8fcc8a7786715c0870d703da47838c708507ffbf | 119 | //! Provides support to work with data stored on the NEAR blockchain
pub mod numbers;
mod object;
pub use object::*;
| 17 | 68 | 0.739496 |
fec7075573c7c41407240866ead7433e3b3e25e5 | 3,297 | //#[macro_use]
extern crate erlang_nif_sys;
extern crate sled;
use erlang_nif_sys::*;
use sled::Db;
use std::{mem, ptr, path::Path, str, slice};
//use std::cmp::min;
use std::sync::atomic::{AtomicIsize, Ordering};
// Resource type handle registered in the load callback; null until load runs.
static mut SLEDDB_TYPE: *const ErlNifResourceType = 0 as *const ErlNifResourceType;
// Counts destructor invocations so tests can observe resource collection.
static mut DTOR_COUNTER: Option<AtomicIsize> = None;
// NIF module table: maps Erlang function names/arities to the Rust handlers.
nif_init!("mynifmod",
          [
              ("open", 1, slice_args!(open)),
              ("put", 3, slice_args!(put)),
              ("get", 2, slice_args!(get)),
              ("sleddb_dtor_count", 0, sleddb_dtor_count)
          ],
          {load: esled_load});
unsafe fn esled_load(env: *mut ErlNifEnv,
_priv_data: *mut *mut c_void,
_load_info: ERL_NIF_TERM) -> c_int {
let mut tried: ErlNifResourceFlags = mem::uninitialized();
DTOR_COUNTER = Some(AtomicIsize::new(0));
SLEDDB_TYPE = enif_open_resource_type(
env,
ptr::null(),
b"sleddb\0".as_ptr(),
Some(sleddb_destructor),
ErlNifResourceFlags::ERL_NIF_RT_CREATE,
&mut tried);
SLEDDB_TYPE.is_null() as c_int
}
/// Resource destructor invoked by the Erlang VM when the last reference to a
/// `sleddb` resource is garbage-collected: flushes and drops the `Db`.
unsafe extern "C" fn sleddb_destructor(_env: *mut ErlNifEnv, handle: *mut c_void) {
    DTOR_COUNTER.as_mut().unwrap().fetch_add(1, Ordering::SeqCst);
    // Move the Db out of the resource memory so it is dropped (closed) here.
    let db = ptr::read(handle as *mut Db);
    // Best-effort flush: the Result is #[must_use] but there is no way to
    // report an error from a destructor, so it is discarded explicitly.
    let _ = db.flush();
}
/// `open(Path)` -> opens (or creates) a sled database at the UTF-8 path held
/// in the binary argument and returns it as an opaque resource term.
/// Raises badarg on wrong arity or when sled fails to open.
/// NOTE(review): the from_utf8 unwrap panics (instead of badarg) on a
/// non-UTF-8 path binary — confirm intended.
fn open(env: *mut ErlNifEnv, args: &[ERL_NIF_TERM]) -> ERL_NIF_TERM {
    let db = match args.len() {
        1 => {
            let path = bin_to_slice(env, args[0]);
            let path = str::from_utf8(path).unwrap();
            match Db::start_default(Path::new(path)) {
                Ok(db) => {
                    db
                }
                Err(_) => { // TODO: surface the sled error instead of badarg
                    return unsafe { enif_make_badarg(env) }
                }
            }
        }
        _ => {
            return unsafe { enif_make_badarg(env) }
        }
    };
    unsafe {
        // Move the Db into VM-managed resource memory; the VM calls
        // sleddb_destructor when the last reference is collected.
        let mem = enif_alloc_resource(SLEDDB_TYPE, mem::size_of::<Db>());
        assert_eq!(mem as usize % mem::align_of::<Db>(), 0);
        ptr::write(mem as *mut Db, db);
        let term = enif_make_resource(env, mem);
        // Drop our own reference; the term now keeps the resource alive.
        enif_release_resource(mem);
        term
    }
}
fn put(env: *mut ErlNifEnv, args: &[ERL_NIF_TERM]) -> ERL_NIF_TERM {
let (db, key, value) = match args.len() {
3 => {
let d: &Db = mem::unintialized();
let d = unsafe { enif_get_resource(env, args[0], SLEDDB_TYPE, &d) };
let k = bin_to_slice(env, args[1]);
let v = bin_to_slice(env, args[2]);
(d, k, v)
}
_ => {
return unsafe { enif_make_badarg(env) }
}
}
match db.set(key, value) {
Ok(_) =>
return atom ok
}
fn get(env: *mut ErlNifEnv, args: &[ERL_NIF_TERM]) -> ERL_NIF_TERM {
}
/// `sleddb_dtor_count()` -> how many sled resources have been destructed so
/// far; lets tests observe VM garbage collection of the resource.
unsafe fn sleddb_dtor_count(env: *mut ErlNifEnv, _: c_int, _: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
    let cnt = DTOR_COUNTER.as_mut().unwrap().load(Ordering::SeqCst);
    // Truncating cast: counts beyond i32::MAX would wrap — fine in practice.
    enif_make_int(env, cnt as i32)
}
fn bin_to_slice<'a>(env: *mut ErlNifEnv, term: ERL_NIF_TERM) -> &'a [u8] {
unsafe {
let mut bin: ErlNifBinary = mem::uninitialized();
enif_inspect_binary(env, term, &mut bin);
slice::from_raw_parts(bin.data, bin.size)
}
}
| 31.103774 | 100 | 0.559296 |
e28f3984f84d805269cb256a8e9b641d59e212af | 91 | extern crate lalrpop;
/// Build script: compile every `.lalrpop` grammar under the crate root into a
/// Rust parser module, unconditionally (ignoring file timestamps).
fn main() {
    // expect() over unwrap(): a grammar error should name the failing step.
    lalrpop::process_root_unconditionally().expect("failed to process lalrpop grammars");
}
| 15.166667 | 53 | 0.714286 |
bf7c5d60a325d07e478db94fab7dc28b94309b24 | 3,471 | use core::convert::TryFrom;
use core::mem::MaybeUninit;
use core::task::Poll;
use embassy::waitqueue::AtomicWaker;
use futures::future::poll_fn;
use num_enum::{IntoPrimitive, TryFromPrimitive};
use crate::pac::interrupt;
use crate::raw;
use crate::RawError;
static SWI2_WAKER: AtomicWaker = AtomicWaker::new();
/// SoC-level events reported by the SoftDevice via `sd_evt_get`, mirroring
/// the `NRF_SOC_EVTS_*` constants from the raw bindings one-to-one so that
/// `TryFromPrimitive` can decode the raw `u32` ids.
#[rustfmt::skip]
#[repr(u32)]
#[derive(Debug, PartialEq, Eq, Clone, Copy, IntoPrimitive, TryFromPrimitive)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
enum SocEvent {
    Hfclkstarted = raw::NRF_SOC_EVTS_NRF_EVT_HFCLKSTARTED,
    PowerFailureWarning = raw::NRF_SOC_EVTS_NRF_EVT_POWER_FAILURE_WARNING,
    FlashOperationSuccess = raw::NRF_SOC_EVTS_NRF_EVT_FLASH_OPERATION_SUCCESS,
    FlashOperationError = raw::NRF_SOC_EVTS_NRF_EVT_FLASH_OPERATION_ERROR,
    RadioBlocked = raw::NRF_SOC_EVTS_NRF_EVT_RADIO_BLOCKED,
    RadioCanceled = raw::NRF_SOC_EVTS_NRF_EVT_RADIO_CANCELED,
    RadioSignalCallbackInvalidReturn = raw::NRF_SOC_EVTS_NRF_EVT_RADIO_SIGNAL_CALLBACK_INVALID_RETURN,
    RadioSessionIdle = raw::NRF_SOC_EVTS_NRF_EVT_RADIO_SESSION_IDLE,
    RadioSessionClosed = raw::NRF_SOC_EVTS_NRF_EVT_RADIO_SESSION_CLOSED,
    // USB power events only exist on SoftDevices for USB-capable chips.
    #[cfg(any(feature="s113", feature="s122", feature="s140"))]
    PowerUsbPowerReady = raw::NRF_SOC_EVTS_NRF_EVT_POWER_USB_POWER_READY,
    #[cfg(any(feature="s113", feature="s122", feature="s140"))]
    PowerUsbDetected = raw::NRF_SOC_EVTS_NRF_EVT_POWER_USB_DETECTED,
    #[cfg(any(feature="s113", feature="s122", feature="s140"))]
    PowerUsbRemoved = raw::NRF_SOC_EVTS_NRF_EVT_POWER_USB_REMOVED,
}
/// Decode and dispatch a raw SoC event id from the SoftDevice.
/// Flash success/error events are forwarded to the flash driver; all other
/// events are only logged. Panics on an id with no `SocEvent` mapping.
fn on_soc_evt(evt: u32) {
    let decoded = SocEvent::try_from(evt).unwrap_or_else(|_| panic!("Unknown soc evt {:?}", evt));
    info!("soc evt {:?}", decoded);
    match decoded {
        SocEvent::FlashOperationError => crate::flash::on_flash_error(),
        SocEvent::FlashOperationSuccess => crate::flash::on_flash_success(),
        _ => {}
    }
}
// TODO actually derive this from the headers + the ATT_MTU
const BLE_EVT_MAX_SIZE: u16 = 128;
/// SoftDevice event pump: every time the SWI2 interrupt wakes this task it
/// drains all pending SoC events, then all pending BLE events, and goes back
/// to sleep. Never completes (always returns `Poll::Pending`).
pub(crate) async fn run() -> ! {
    poll_fn(|cx| unsafe {
        SWI2_WAKER.register(cx.waker());
        // Drain SoC events until the SoftDevice reports none pending.
        let mut evt: u32 = 0;
        loop {
            match RawError::convert(raw::sd_evt_get(&mut evt as _)) {
                Ok(()) => on_soc_evt(evt),
                Err(RawError::NotFound) => break,
                Err(err) => panic!("sd_evt_get err {:?}", err),
            }
        }
        // Using u32 since the buffer has to be aligned to 4
        let mut evt: MaybeUninit<[u32; BLE_EVT_MAX_SIZE as usize / 4]> = MaybeUninit::uninit();
        // Drain BLE events; each call writes the event into `evt`.
        loop {
            let mut len: u16 = BLE_EVT_MAX_SIZE;
            let ret = raw::sd_ble_evt_get(evt.as_mut_ptr() as *mut u8, &mut len as _);
            match RawError::convert(ret) {
                Ok(()) => crate::ble::on_evt(evt.as_ptr() as *const raw::ble_evt_t),
                Err(RawError::NotFound) => break,
                Err(RawError::BleNotEnabled) => break,
                Err(RawError::NoMem) => panic!("BUG: BLE_EVT_MAX_SIZE is too low"),
                Err(err) => panic!("sd_ble_evt_get err {:?}", err),
            }
        }
        Poll::Pending
    })
    .await
}
// The SoftDevice signals pending events on SWI2; the interrupt name differs
// per chip family, so both spellings wake the same event-pump task.
#[cfg(any(feature = "nrf52805", feature = "nrf52810", feature = "nrf52811"))]
#[interrupt]
unsafe fn SWI2() {
    SWI2_WAKER.wake();
}
#[cfg(not(any(feature = "nrf52805", feature = "nrf52810", feature = "nrf52811")))]
#[interrupt]
unsafe fn SWI2_EGU2() {
    SWI2_WAKER.wake();
}
| 35.783505 | 102 | 0.651685 |
89ce3759fd3de8cb77b63aa9ed4214901843cdac | 254,506 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Write the JSON request body for `AssociateCertificate` (only the ACM ARN).
pub fn serialize_structure_crate_input_associate_certificate_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::AssociateCertificateInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_1) = &input.arn {
        object.key("arn").string(var_1.as_str());
    }
    Ok(())
}
/// Write the JSON request body for `CreateJob`. Optional members are emitted
/// only when set; `priority` is skipped when it equals the default (0).
pub fn serialize_structure_crate_input_create_job_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreateJobInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_2) = &input.acceleration_settings {
        let mut object_3 = object.key("accelerationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_acceleration_settings(
            &mut object_3,
            var_2,
        )?;
        object_3.finish();
    }
    if let Some(var_4) = &input.billing_tags_source {
        object.key("billingTagsSource").string(var_4.as_str());
    }
    if let Some(var_5) = &input.client_request_token {
        object.key("clientRequestToken").string(var_5.as_str());
    }
    if let Some(var_6) = &input.hop_destinations {
        let mut array_7 = object.key("hopDestinations").start_array();
        for item_8 in var_6 {
            {
                let mut object_9 = array_7.value().start_object();
                crate::json_ser::serialize_structure_crate_model_hop_destination(
                    &mut object_9,
                    item_8,
                )?;
                object_9.finish();
            }
        }
        array_7.finish();
    }
    if let Some(var_10) = &input.job_template {
        object.key("jobTemplate").string(var_10.as_str());
    }
    if input.priority != 0 {
        object.key("priority").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.priority).into()),
        );
    }
    if let Some(var_11) = &input.queue {
        object.key("queue").string(var_11.as_str());
    }
    if let Some(var_12) = &input.role {
        object.key("role").string(var_12.as_str());
    }
    if let Some(var_13) = &input.settings {
        let mut object_14 = object.key("settings").start_object();
        crate::json_ser::serialize_structure_crate_model_job_settings(&mut object_14, var_13)?;
        object_14.finish();
    }
    if let Some(var_15) = &input.simulate_reserved_queue {
        object.key("simulateReservedQueue").string(var_15.as_str());
    }
    if let Some(var_16) = &input.status_update_interval {
        object.key("statusUpdateInterval").string(var_16.as_str());
    }
    if let Some(var_17) = &input.tags {
        let mut object_18 = object.key("tags").start_object();
        for (key_19, value_20) in var_17 {
            {
                object_18.key(key_19).string(value_20.as_str());
            }
        }
        object_18.finish();
    }
    if let Some(var_21) = &input.user_metadata {
        let mut object_22 = object.key("userMetadata").start_object();
        for (key_23, value_24) in var_21 {
            {
                object_22.key(key_23).string(value_24.as_str());
            }
        }
        object_22.finish();
    }
    Ok(())
}
/// Write the JSON request body for `CreateJobTemplate`; mirrors the CreateJob
/// serializer but targets template settings.
pub fn serialize_structure_crate_input_create_job_template_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreateJobTemplateInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_25) = &input.acceleration_settings {
        let mut object_26 = object.key("accelerationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_acceleration_settings(
            &mut object_26,
            var_25,
        )?;
        object_26.finish();
    }
    if let Some(var_27) = &input.category {
        object.key("category").string(var_27.as_str());
    }
    if let Some(var_28) = &input.description {
        object.key("description").string(var_28.as_str());
    }
    if let Some(var_29) = &input.hop_destinations {
        let mut array_30 = object.key("hopDestinations").start_array();
        for item_31 in var_29 {
            {
                let mut object_32 = array_30.value().start_object();
                crate::json_ser::serialize_structure_crate_model_hop_destination(
                    &mut object_32,
                    item_31,
                )?;
                object_32.finish();
            }
        }
        array_30.finish();
    }
    if let Some(var_33) = &input.name {
        object.key("name").string(var_33.as_str());
    }
    if input.priority != 0 {
        object.key("priority").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.priority).into()),
        );
    }
    if let Some(var_34) = &input.queue {
        object.key("queue").string(var_34.as_str());
    }
    if let Some(var_35) = &input.settings {
        let mut object_36 = object.key("settings").start_object();
        crate::json_ser::serialize_structure_crate_model_job_template_settings(
            &mut object_36,
            var_35,
        )?;
        object_36.finish();
    }
    if let Some(var_37) = &input.status_update_interval {
        object.key("statusUpdateInterval").string(var_37.as_str());
    }
    if let Some(var_38) = &input.tags {
        let mut object_39 = object.key("tags").start_object();
        for (key_40, value_41) in var_38 {
            {
                object_39.key(key_40).string(value_41.as_str());
            }
        }
        object_39.finish();
    }
    Ok(())
}
/// Write the JSON request body for `CreatePreset`.
pub fn serialize_structure_crate_input_create_preset_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreatePresetInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_42) = &input.category {
        object.key("category").string(var_42.as_str());
    }
    if let Some(var_43) = &input.description {
        object.key("description").string(var_43.as_str());
    }
    if let Some(var_44) = &input.name {
        object.key("name").string(var_44.as_str());
    }
    if let Some(var_45) = &input.settings {
        let mut object_46 = object.key("settings").start_object();
        crate::json_ser::serialize_structure_crate_model_preset_settings(&mut object_46, var_45)?;
        object_46.finish();
    }
    if let Some(var_47) = &input.tags {
        let mut object_48 = object.key("tags").start_object();
        for (key_49, value_50) in var_47 {
            {
                object_48.key(key_49).string(value_50.as_str());
            }
        }
        object_48.finish();
    }
    Ok(())
}
/// Write the JSON request body for `CreateQueue`.
pub fn serialize_structure_crate_input_create_queue_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreateQueueInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_51) = &input.description {
        object.key("description").string(var_51.as_str());
    }
    if let Some(var_52) = &input.name {
        object.key("name").string(var_52.as_str());
    }
    if let Some(var_53) = &input.pricing_plan {
        object.key("pricingPlan").string(var_53.as_str());
    }
    if let Some(var_54) = &input.reservation_plan_settings {
        let mut object_55 = object.key("reservationPlanSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_reservation_plan_settings(
            &mut object_55,
            var_54,
        )?;
        object_55.finish();
    }
    if let Some(var_56) = &input.status {
        object.key("status").string(var_56.as_str());
    }
    if let Some(var_57) = &input.tags {
        let mut object_58 = object.key("tags").start_object();
        for (key_59, value_60) in var_57 {
            {
                object_58.key(key_59).string(value_60.as_str());
            }
        }
        object_58.finish();
    }
    Ok(())
}
/// Write the JSON request body for `DescribeEndpoints` (pagination + mode).
pub fn serialize_structure_crate_input_describe_endpoints_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::DescribeEndpointsInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.max_results != 0 {
        object.key("maxResults").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_results).into()),
        );
    }
    if let Some(var_61) = &input.mode {
        object.key("mode").string(var_61.as_str());
    }
    if let Some(var_62) = &input.next_token {
        object.key("nextToken").string(var_62.as_str());
    }
    Ok(())
}
/// Write the JSON request body for `PutPolicy` (nested policy document).
pub fn serialize_structure_crate_input_put_policy_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::PutPolicyInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_63) = &input.policy {
        let mut object_64 = object.key("policy").start_object();
        crate::json_ser::serialize_structure_crate_model_policy(&mut object_64, var_63)?;
        object_64.finish();
    }
    Ok(())
}
/// Write the JSON request body for `TagResource` (ARN plus tag map).
pub fn serialize_structure_crate_input_tag_resource_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::TagResourceInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_65) = &input.arn {
        object.key("arn").string(var_65.as_str());
    }
    if let Some(var_66) = &input.tags {
        let mut object_67 = object.key("tags").start_object();
        for (key_68, value_69) in var_66 {
            {
                object_67.key(key_68).string(value_69.as_str());
            }
        }
        object_67.finish();
    }
    Ok(())
}
/// Write the JSON request body for `UntagResource` (list of tag keys only;
/// the ARN travels in the URI).
pub fn serialize_structure_crate_input_untag_resource_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UntagResourceInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_70) = &input.tag_keys {
        let mut array_71 = object.key("tagKeys").start_array();
        for item_72 in var_70 {
            {
                array_71.value().string(item_72.as_str());
            }
        }
        array_71.finish();
    }
    Ok(())
}
/// Write the JSON request body for `UpdateJobTemplate`; only fields being
/// updated are present.
pub fn serialize_structure_crate_input_update_job_template_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateJobTemplateInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_73) = &input.acceleration_settings {
        let mut object_74 = object.key("accelerationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_acceleration_settings(
            &mut object_74,
            var_73,
        )?;
        object_74.finish();
    }
    if let Some(var_75) = &input.category {
        object.key("category").string(var_75.as_str());
    }
    if let Some(var_76) = &input.description {
        object.key("description").string(var_76.as_str());
    }
    if let Some(var_77) = &input.hop_destinations {
        let mut array_78 = object.key("hopDestinations").start_array();
        for item_79 in var_77 {
            {
                let mut object_80 = array_78.value().start_object();
                crate::json_ser::serialize_structure_crate_model_hop_destination(
                    &mut object_80,
                    item_79,
                )?;
                object_80.finish();
            }
        }
        array_78.finish();
    }
    if input.priority != 0 {
        object.key("priority").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.priority).into()),
        );
    }
    if let Some(var_81) = &input.queue {
        object.key("queue").string(var_81.as_str());
    }
    if let Some(var_82) = &input.settings {
        let mut object_83 = object.key("settings").start_object();
        crate::json_ser::serialize_structure_crate_model_job_template_settings(
            &mut object_83,
            var_82,
        )?;
        object_83.finish();
    }
    if let Some(var_84) = &input.status_update_interval {
        object.key("statusUpdateInterval").string(var_84.as_str());
    }
    Ok(())
}
/// Write the JSON request body for `UpdatePreset`.
pub fn serialize_structure_crate_input_update_preset_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdatePresetInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_85) = &input.category {
        object.key("category").string(var_85.as_str());
    }
    if let Some(var_86) = &input.description {
        object.key("description").string(var_86.as_str());
    }
    if let Some(var_87) = &input.settings {
        let mut object_88 = object.key("settings").start_object();
        crate::json_ser::serialize_structure_crate_model_preset_settings(&mut object_88, var_87)?;
        object_88.finish();
    }
    Ok(())
}
/// Write the JSON request body for `UpdateQueue`.
pub fn serialize_structure_crate_input_update_queue_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateQueueInput,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_89) = &input.description {
        object.key("description").string(var_89.as_str());
    }
    if let Some(var_90) = &input.reservation_plan_settings {
        let mut object_91 = object.key("reservationPlanSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_reservation_plan_settings(
            &mut object_91,
            var_90,
        )?;
        object_91.finish();
    }
    if let Some(var_92) = &input.status {
        object.key("status").string(var_92.as_str());
    }
    Ok(())
}
/// Write the JSON object for the `AccelerationSettings` model.
pub fn serialize_structure_crate_model_acceleration_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AccelerationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_93) = &input.mode {
        object.key("mode").string(var_93.as_str());
    }
    Ok(())
}
/// Write the JSON object for the `HopDestination` model; integer members are
/// skipped when they equal the default (0).
pub fn serialize_structure_crate_model_hop_destination(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HopDestination,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.priority != 0 {
        object.key("priority").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.priority).into()),
        );
    }
    if let Some(var_94) = &input.queue {
        object.key("queue").string(var_94.as_str());
    }
    if input.wait_minutes != 0 {
        object.key("waitMinutes").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.wait_minutes).into()),
        );
    }
    Ok(())
}
/// Write the JSON object for the `JobSettings` model, delegating each nested
/// structure to its own generated serializer.
pub fn serialize_structure_crate_model_job_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::JobSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.ad_avail_offset != 0 {
        object.key("adAvailOffset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.ad_avail_offset).into()),
        );
    }
    if let Some(var_95) = &input.avail_blanking {
        let mut object_96 = object.key("availBlanking").start_object();
        crate::json_ser::serialize_structure_crate_model_avail_blanking(&mut object_96, var_95)?;
        object_96.finish();
    }
    if let Some(var_97) = &input.esam {
        let mut object_98 = object.key("esam").start_object();
        crate::json_ser::serialize_structure_crate_model_esam_settings(&mut object_98, var_97)?;
        object_98.finish();
    }
    if let Some(var_99) = &input.extended_data_services {
        let mut object_100 = object.key("extendedDataServices").start_object();
        crate::json_ser::serialize_structure_crate_model_extended_data_services(
            &mut object_100,
            var_99,
        )?;
        object_100.finish();
    }
    if let Some(var_101) = &input.inputs {
        let mut array_102 = object.key("inputs").start_array();
        for item_103 in var_101 {
            {
                let mut object_104 = array_102.value().start_object();
                crate::json_ser::serialize_structure_crate_model_input(&mut object_104, item_103)?;
                object_104.finish();
            }
        }
        array_102.finish();
    }
    if let Some(var_105) = &input.kantar_watermark {
        let mut object_106 = object.key("kantarWatermark").start_object();
        crate::json_ser::serialize_structure_crate_model_kantar_watermark_settings(
            &mut object_106,
            var_105,
        )?;
        object_106.finish();
    }
    if let Some(var_107) = &input.motion_image_inserter {
        let mut object_108 = object.key("motionImageInserter").start_object();
        crate::json_ser::serialize_structure_crate_model_motion_image_inserter(
            &mut object_108,
            var_107,
        )?;
        object_108.finish();
    }
    if let Some(var_109) = &input.nielsen_configuration {
        let mut object_110 = object.key("nielsenConfiguration").start_object();
        crate::json_ser::serialize_structure_crate_model_nielsen_configuration(
            &mut object_110,
            var_109,
        )?;
        object_110.finish();
    }
    if let Some(var_111) = &input.nielsen_non_linear_watermark {
        let mut object_112 = object.key("nielsenNonLinearWatermark").start_object();
        crate::json_ser::serialize_structure_crate_model_nielsen_non_linear_watermark_settings(
            &mut object_112,
            var_111,
        )?;
        object_112.finish();
    }
    if let Some(var_113) = &input.output_groups {
        let mut array_114 = object.key("outputGroups").start_array();
        for item_115 in var_113 {
            {
                let mut object_116 = array_114.value().start_object();
                crate::json_ser::serialize_structure_crate_model_output_group(
                    &mut object_116,
                    item_115,
                )?;
                object_116.finish();
            }
        }
        array_114.finish();
    }
    if let Some(var_117) = &input.timecode_config {
        let mut object_118 = object.key("timecodeConfig").start_object();
        crate::json_ser::serialize_structure_crate_model_timecode_config(&mut object_118, var_117)?;
        object_118.finish();
    }
    if let Some(var_119) = &input.timed_metadata_insertion {
        let mut object_120 = object.key("timedMetadataInsertion").start_object();
        crate::json_ser::serialize_structure_crate_model_timed_metadata_insertion(
            &mut object_120,
            var_119,
        )?;
        object_120.finish();
    }
    Ok(())
}
/// Writes the JSON form of a `JobTemplateSettings` into `object`.
///
/// Optional fields are emitted only when `Some`; the integer field
/// `ad_avail_offset` is omitted when it holds the default value of zero.
pub fn serialize_structure_crate_model_job_template_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::JobTemplateSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.ad_avail_offset != 0 {
        object.key("adAvailOffset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.ad_avail_offset).into()),
        );
    }
    if let Some(avail_blanking) = &input.avail_blanking {
        let mut w = object.key("availBlanking").start_object();
        crate::json_ser::serialize_structure_crate_model_avail_blanking(&mut w, avail_blanking)?;
        w.finish();
    }
    if let Some(esam) = &input.esam {
        let mut w = object.key("esam").start_object();
        crate::json_ser::serialize_structure_crate_model_esam_settings(&mut w, esam)?;
        w.finish();
    }
    if let Some(extended) = &input.extended_data_services {
        let mut w = object.key("extendedDataServices").start_object();
        crate::json_ser::serialize_structure_crate_model_extended_data_services(&mut w, extended)?;
        w.finish();
    }
    if let Some(inputs) = &input.inputs {
        // "inputs" is a JSON array of InputTemplate objects.
        let mut arr = object.key("inputs").start_array();
        for template in inputs {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_input_template(&mut w, template)?;
            w.finish();
        }
        arr.finish();
    }
    if let Some(kantar) = &input.kantar_watermark {
        let mut w = object.key("kantarWatermark").start_object();
        crate::json_ser::serialize_structure_crate_model_kantar_watermark_settings(&mut w, kantar)?;
        w.finish();
    }
    if let Some(motion) = &input.motion_image_inserter {
        let mut w = object.key("motionImageInserter").start_object();
        crate::json_ser::serialize_structure_crate_model_motion_image_inserter(&mut w, motion)?;
        w.finish();
    }
    if let Some(nielsen) = &input.nielsen_configuration {
        let mut w = object.key("nielsenConfiguration").start_object();
        crate::json_ser::serialize_structure_crate_model_nielsen_configuration(&mut w, nielsen)?;
        w.finish();
    }
    if let Some(watermark) = &input.nielsen_non_linear_watermark {
        let mut w = object.key("nielsenNonLinearWatermark").start_object();
        crate::json_ser::serialize_structure_crate_model_nielsen_non_linear_watermark_settings(
            &mut w, watermark,
        )?;
        w.finish();
    }
    if let Some(groups) = &input.output_groups {
        // "outputGroups" is a JSON array of OutputGroup objects.
        let mut arr = object.key("outputGroups").start_array();
        for group in groups {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_output_group(&mut w, group)?;
            w.finish();
        }
        arr.finish();
    }
    if let Some(timecode) = &input.timecode_config {
        let mut w = object.key("timecodeConfig").start_object();
        crate::json_ser::serialize_structure_crate_model_timecode_config(&mut w, timecode)?;
        w.finish();
    }
    if let Some(timed_metadata) = &input.timed_metadata_insertion {
        let mut w = object.key("timedMetadataInsertion").start_object();
        crate::json_ser::serialize_structure_crate_model_timed_metadata_insertion(&mut w, timed_metadata)?;
        w.finish();
    }
    Ok(())
}
/// Writes the JSON form of a `PresetSettings` into `object`.
/// Each field is emitted only when present (`Some`).
pub fn serialize_structure_crate_model_preset_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::PresetSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(audio_descriptions) = &input.audio_descriptions {
        let mut arr = object.key("audioDescriptions").start_array();
        for desc in audio_descriptions {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_audio_description(&mut w, desc)?;
            w.finish();
        }
        arr.finish();
    }
    if let Some(caption_descriptions) = &input.caption_descriptions {
        let mut arr = object.key("captionDescriptions").start_array();
        for desc in caption_descriptions {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_caption_description_preset(&mut w, desc)?;
            w.finish();
        }
        arr.finish();
    }
    if let Some(container) = &input.container_settings {
        let mut w = object.key("containerSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_container_settings(&mut w, container)?;
        w.finish();
    }
    if let Some(video) = &input.video_description {
        let mut w = object.key("videoDescription").start_object();
        crate::json_ser::serialize_structure_crate_model_video_description(&mut w, video)?;
        w.finish();
    }
    Ok(())
}
/// Writes the JSON form of a `ReservationPlanSettings` into `object`.
/// `reservedSlots` is always emitted; the enum-valued strings only when set.
pub fn serialize_structure_crate_model_reservation_plan_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ReservationPlanSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(commitment) = &input.commitment {
        object.key("commitment").string(commitment.as_str());
    }
    if let Some(renewal_type) = &input.renewal_type {
        object.key("renewalType").string(renewal_type.as_str());
    }
    // Unconditional: reservedSlots is written even when zero.
    object.key("reservedSlots").number(
        #[allow(clippy::useless_conversion)]
        aws_smithy_types::Number::NegInt((input.reserved_slots).into()),
    );
    Ok(())
}
/// Writes the JSON form of a `Policy` into `object`: three optional
/// string-valued input-access settings.
pub fn serialize_structure_crate_model_policy(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Policy,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(http_inputs) = &input.http_inputs {
        object.key("httpInputs").string(http_inputs.as_str());
    }
    if let Some(https_inputs) = &input.https_inputs {
        object.key("httpsInputs").string(https_inputs.as_str());
    }
    if let Some(s3_inputs) = &input.s3_inputs {
        object.key("s3Inputs").string(s3_inputs.as_str());
    }
    Ok(())
}
/// Writes the JSON form of an `AvailBlanking` into `object`
/// (a single optional image-path string).
pub fn serialize_structure_crate_model_avail_blanking(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AvailBlanking,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(image) = &input.avail_blanking_image {
        object.key("availBlankingImage").string(image.as_str());
    }
    Ok(())
}
/// Writes the JSON form of an `EsamSettings` into `object`.
/// `responseSignalPreroll` is omitted when it holds the default of zero.
pub fn serialize_structure_crate_model_esam_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::EsamSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(notification) = &input.manifest_confirm_condition_notification {
        let mut w = object
            .key("manifestConfirmConditionNotification")
            .start_object();
        crate::json_ser::serialize_structure_crate_model_esam_manifest_confirm_condition_notification(&mut w, notification)?;
        w.finish();
    }
    if input.response_signal_preroll != 0 {
        object.key("responseSignalPreroll").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.response_signal_preroll).into()),
        );
    }
    if let Some(notification) = &input.signal_processing_notification {
        let mut w = object.key("signalProcessingNotification").start_object();
        crate::json_ser::serialize_structure_crate_model_esam_signal_processing_notification(
            &mut w,
            notification,
        )?;
        w.finish();
    }
    Ok(())
}
/// Writes the JSON form of an `ExtendedDataServices` into `object`
/// (two optional enum-valued strings).
pub fn serialize_structure_crate_model_extended_data_services(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ExtendedDataServices,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(copy_protection) = &input.copy_protection_action {
        object.key("copyProtectionAction").string(copy_protection.as_str());
    }
    if let Some(vchip) = &input.vchip_action {
        object.key("vchipAction").string(vchip.as_str());
    }
    Ok(())
}
/// Writes the JSON form of an `Input` into `object`.
///
/// Optional fields are emitted only when `Some`; integer fields are
/// omitted when they hold the default value of zero. The three selector
/// fields are JSON maps keyed by selector name.
pub fn serialize_structure_crate_model_input(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Input,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(selector_groups) = &input.audio_selector_groups {
        let mut map = object.key("audioSelectorGroups").start_object();
        for (name, group) in selector_groups {
            let mut w = map.key(name).start_object();
            crate::json_ser::serialize_structure_crate_model_audio_selector_group(&mut w, group)?;
            w.finish();
        }
        map.finish();
    }
    if let Some(selectors) = &input.audio_selectors {
        let mut map = object.key("audioSelectors").start_object();
        for (name, selector) in selectors {
            let mut w = map.key(name).start_object();
            crate::json_ser::serialize_structure_crate_model_audio_selector(&mut w, selector)?;
            w.finish();
        }
        map.finish();
    }
    if let Some(selectors) = &input.caption_selectors {
        let mut map = object.key("captionSelectors").start_object();
        for (name, selector) in selectors {
            let mut w = map.key(name).start_object();
            crate::json_ser::serialize_structure_crate_model_caption_selector(&mut w, selector)?;
            w.finish();
        }
        map.finish();
    }
    if let Some(crop) = &input.crop {
        let mut w = object.key("crop").start_object();
        crate::json_ser::serialize_structure_crate_model_rectangle(&mut w, crop)?;
        w.finish();
    }
    if let Some(deblock) = &input.deblock_filter {
        object.key("deblockFilter").string(deblock.as_str());
    }
    if let Some(decryption) = &input.decryption_settings {
        let mut w = object.key("decryptionSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_input_decryption_settings(&mut w, decryption)?;
        w.finish();
    }
    if let Some(denoise) = &input.denoise_filter {
        object.key("denoiseFilter").string(denoise.as_str());
    }
    if let Some(dolby_xml) = &input.dolby_vision_metadata_xml {
        object.key("dolbyVisionMetadataXml").string(dolby_xml.as_str());
    }
    if let Some(file_input) = &input.file_input {
        object.key("fileInput").string(file_input.as_str());
    }
    if let Some(filter_enable) = &input.filter_enable {
        object.key("filterEnable").string(filter_enable.as_str());
    }
    if input.filter_strength != 0 {
        object.key("filterStrength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.filter_strength).into()),
        );
    }
    if let Some(image_inserter) = &input.image_inserter {
        let mut w = object.key("imageInserter").start_object();
        crate::json_ser::serialize_structure_crate_model_image_inserter(&mut w, image_inserter)?;
        w.finish();
    }
    if let Some(clippings) = &input.input_clippings {
        let mut arr = object.key("inputClippings").start_array();
        for clipping in clippings {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_input_clipping(&mut w, clipping)?;
            w.finish();
        }
        arr.finish();
    }
    if let Some(scan_type) = &input.input_scan_type {
        object.key("inputScanType").string(scan_type.as_str());
    }
    if let Some(position) = &input.position {
        let mut w = object.key("position").start_object();
        crate::json_ser::serialize_structure_crate_model_rectangle(&mut w, position)?;
        w.finish();
    }
    if input.program_number != 0 {
        object.key("programNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.program_number).into()),
        );
    }
    if let Some(psi_control) = &input.psi_control {
        object.key("psiControl").string(psi_control.as_str());
    }
    if let Some(imps) = &input.supplemental_imps {
        // "supplementalImps" is a plain array of strings.
        let mut arr = object.key("supplementalImps").start_array();
        for imp in imps {
            arr.value().string(imp.as_str());
        }
        arr.finish();
    }
    if let Some(timecode_source) = &input.timecode_source {
        object.key("timecodeSource").string(timecode_source.as_str());
    }
    if let Some(timecode_start) = &input.timecode_start {
        object.key("timecodeStart").string(timecode_start.as_str());
    }
    if let Some(generator) = &input.video_generator {
        let mut w = object.key("videoGenerator").start_object();
        crate::json_ser::serialize_structure_crate_model_input_video_generator(&mut w, generator)?;
        w.finish();
    }
    if let Some(video_selector) = &input.video_selector {
        let mut w = object.key("videoSelector").start_object();
        crate::json_ser::serialize_structure_crate_model_video_selector(&mut w, video_selector)?;
        w.finish();
    }
    Ok(())
}
/// Writes the JSON form of a `KantarWatermarkSettings` into `object`.
/// Numeric fields are omitted at their zero defaults; `fileOffset` is a
/// floating-point value, `kantarLicenseId` an integer.
pub fn serialize_structure_crate_model_kantar_watermark_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::KantarWatermarkSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(channel_name) = &input.channel_name {
        object.key("channelName").string(channel_name.as_str());
    }
    if let Some(content_reference) = &input.content_reference {
        object.key("contentReference").string(content_reference.as_str());
    }
    if let Some(secret_name) = &input.credentials_secret_name {
        object.key("credentialsSecretName").string(secret_name.as_str());
    }
    if input.file_offset != 0.0 {
        object.key("fileOffset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.file_offset).into()),
        );
    }
    if input.kantar_license_id != 0 {
        object.key("kantarLicenseId").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.kantar_license_id).into()),
        );
    }
    if let Some(server_url) = &input.kantar_server_url {
        object.key("kantarServerUrl").string(server_url.as_str());
    }
    if let Some(log_destination) = &input.log_destination {
        object.key("logDestination").string(log_destination.as_str());
    }
    if let Some(metadata3) = &input.metadata3 {
        object.key("metadata3").string(metadata3.as_str());
    }
    if let Some(metadata4) = &input.metadata4 {
        object.key("metadata4").string(metadata4.as_str());
    }
    if let Some(metadata5) = &input.metadata5 {
        object.key("metadata5").string(metadata5.as_str());
    }
    if let Some(metadata6) = &input.metadata6 {
        object.key("metadata6").string(metadata6.as_str());
    }
    if let Some(metadata7) = &input.metadata7 {
        object.key("metadata7").string(metadata7.as_str());
    }
    if let Some(metadata8) = &input.metadata8 {
        object.key("metadata8").string(metadata8.as_str());
    }
    Ok(())
}
/// Writes the JSON form of a `MotionImageInserter` into `object`.
pub fn serialize_structure_crate_model_motion_image_inserter(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MotionImageInserter,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(framerate) = &input.framerate {
        let mut w = object.key("framerate").start_object();
        crate::json_ser::serialize_structure_crate_model_motion_image_insertion_framerate(
            &mut w, framerate,
        )?;
        w.finish();
    }
    if let Some(input_path) = &input.input {
        object.key("input").string(input_path.as_str());
    }
    if let Some(insertion_mode) = &input.insertion_mode {
        object.key("insertionMode").string(insertion_mode.as_str());
    }
    if let Some(offset) = &input.offset {
        let mut w = object.key("offset").start_object();
        crate::json_ser::serialize_structure_crate_model_motion_image_insertion_offset(&mut w, offset)?;
        w.finish();
    }
    if let Some(playback) = &input.playback {
        object.key("playback").string(playback.as_str());
    }
    if let Some(start_time) = &input.start_time {
        object.key("startTime").string(start_time.as_str());
    }
    Ok(())
}
/// Writes the JSON form of a `NielsenConfiguration` into `object`.
/// `breakoutCode` is skipped at its zero default.
pub fn serialize_structure_crate_model_nielsen_configuration(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::NielsenConfiguration,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.breakout_code != 0 {
        object.key("breakoutCode").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.breakout_code).into()),
        );
    }
    if let Some(distributor_id) = &input.distributor_id {
        object.key("distributorId").string(distributor_id.as_str());
    }
    Ok(())
}
/// Writes the JSON form of a `NielsenNonLinearWatermarkSettings` into `object`.
/// `sourceId` is skipped at its zero default; all other fields are optional strings.
pub fn serialize_structure_crate_model_nielsen_non_linear_watermark_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::NielsenNonLinearWatermarkSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(process) = &input.active_watermark_process {
        object.key("activeWatermarkProcess").string(process.as_str());
    }
    if let Some(adi_filename) = &input.adi_filename {
        object.key("adiFilename").string(adi_filename.as_str());
    }
    if let Some(asset_id) = &input.asset_id {
        object.key("assetId").string(asset_id.as_str());
    }
    if let Some(asset_name) = &input.asset_name {
        object.key("assetName").string(asset_name.as_str());
    }
    if let Some(cbet_source_id) = &input.cbet_source_id {
        object.key("cbetSourceId").string(cbet_source_id.as_str());
    }
    if let Some(episode_id) = &input.episode_id {
        object.key("episodeId").string(episode_id.as_str());
    }
    if let Some(destination) = &input.metadata_destination {
        object.key("metadataDestination").string(destination.as_str());
    }
    if input.source_id != 0 {
        object.key("sourceId").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.source_id).into()),
        );
    }
    if let Some(status) = &input.source_watermark_status {
        object.key("sourceWatermarkStatus").string(status.as_str());
    }
    if let Some(tic_server_url) = &input.tic_server_url {
        object.key("ticServerUrl").string(tic_server_url.as_str());
    }
    if let Some(unique_tic) = &input.unique_tic_per_audio_track {
        object.key("uniqueTicPerAudioTrack").string(unique_tic.as_str());
    }
    Ok(())
}
/// Writes the JSON form of an `OutputGroup` into `object`.
pub fn serialize_structure_crate_model_output_group(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::OutputGroup,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(automated) = &input.automated_encoding_settings {
        let mut w = object.key("automatedEncodingSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_automated_encoding_settings(&mut w, automated)?;
        w.finish();
    }
    if let Some(custom_name) = &input.custom_name {
        object.key("customName").string(custom_name.as_str());
    }
    if let Some(name) = &input.name {
        object.key("name").string(name.as_str());
    }
    if let Some(group_settings) = &input.output_group_settings {
        let mut w = object.key("outputGroupSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_output_group_settings(&mut w, group_settings)?;
        w.finish();
    }
    if let Some(outputs) = &input.outputs {
        // "outputs" is a JSON array of Output objects.
        let mut arr = object.key("outputs").start_array();
        for output in outputs {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_output(&mut w, output)?;
            w.finish();
        }
        arr.finish();
    }
    Ok(())
}
/// Writes the JSON form of a `TimecodeConfig` into `object`
/// (four optional string fields).
pub fn serialize_structure_crate_model_timecode_config(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TimecodeConfig,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(anchor) = &input.anchor {
        object.key("anchor").string(anchor.as_str());
    }
    if let Some(source) = &input.source {
        object.key("source").string(source.as_str());
    }
    if let Some(start) = &input.start {
        object.key("start").string(start.as_str());
    }
    if let Some(timestamp_offset) = &input.timestamp_offset {
        object.key("timestampOffset").string(timestamp_offset.as_str());
    }
    Ok(())
}
/// Writes the JSON form of a `TimedMetadataInsertion` into `object`
/// (an optional array of ID3 insertion objects).
pub fn serialize_structure_crate_model_timed_metadata_insertion(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TimedMetadataInsertion,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(insertions) = &input.id3_insertions {
        let mut arr = object.key("id3Insertions").start_array();
        for insertion in insertions {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_id3_insertion(&mut w, insertion)?;
            w.finish();
        }
        arr.finish();
    }
    Ok(())
}
/// Writes the JSON form of an `InputTemplate` into `object`.
///
/// Optional fields are emitted only when `Some`; integer fields are
/// omitted when they hold the default value of zero. The three selector
/// fields are JSON maps keyed by selector name.
pub fn serialize_structure_crate_model_input_template(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::InputTemplate,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(selector_groups) = &input.audio_selector_groups {
        let mut map = object.key("audioSelectorGroups").start_object();
        for (name, group) in selector_groups {
            let mut w = map.key(name).start_object();
            crate::json_ser::serialize_structure_crate_model_audio_selector_group(&mut w, group)?;
            w.finish();
        }
        map.finish();
    }
    if let Some(selectors) = &input.audio_selectors {
        let mut map = object.key("audioSelectors").start_object();
        for (name, selector) in selectors {
            let mut w = map.key(name).start_object();
            crate::json_ser::serialize_structure_crate_model_audio_selector(&mut w, selector)?;
            w.finish();
        }
        map.finish();
    }
    if let Some(selectors) = &input.caption_selectors {
        let mut map = object.key("captionSelectors").start_object();
        for (name, selector) in selectors {
            let mut w = map.key(name).start_object();
            crate::json_ser::serialize_structure_crate_model_caption_selector(&mut w, selector)?;
            w.finish();
        }
        map.finish();
    }
    if let Some(crop) = &input.crop {
        let mut w = object.key("crop").start_object();
        crate::json_ser::serialize_structure_crate_model_rectangle(&mut w, crop)?;
        w.finish();
    }
    if let Some(deblock) = &input.deblock_filter {
        object.key("deblockFilter").string(deblock.as_str());
    }
    if let Some(denoise) = &input.denoise_filter {
        object.key("denoiseFilter").string(denoise.as_str());
    }
    if let Some(dolby_xml) = &input.dolby_vision_metadata_xml {
        object.key("dolbyVisionMetadataXml").string(dolby_xml.as_str());
    }
    if let Some(filter_enable) = &input.filter_enable {
        object.key("filterEnable").string(filter_enable.as_str());
    }
    if input.filter_strength != 0 {
        object.key("filterStrength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.filter_strength).into()),
        );
    }
    if let Some(image_inserter) = &input.image_inserter {
        let mut w = object.key("imageInserter").start_object();
        crate::json_ser::serialize_structure_crate_model_image_inserter(&mut w, image_inserter)?;
        w.finish();
    }
    if let Some(clippings) = &input.input_clippings {
        let mut arr = object.key("inputClippings").start_array();
        for clipping in clippings {
            let mut w = arr.value().start_object();
            crate::json_ser::serialize_structure_crate_model_input_clipping(&mut w, clipping)?;
            w.finish();
        }
        arr.finish();
    }
    if let Some(scan_type) = &input.input_scan_type {
        object.key("inputScanType").string(scan_type.as_str());
    }
    if let Some(position) = &input.position {
        let mut w = object.key("position").start_object();
        crate::json_ser::serialize_structure_crate_model_rectangle(&mut w, position)?;
        w.finish();
    }
    if input.program_number != 0 {
        object.key("programNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.program_number).into()),
        );
    }
    if let Some(psi_control) = &input.psi_control {
        object.key("psiControl").string(psi_control.as_str());
    }
    if let Some(timecode_source) = &input.timecode_source {
        object.key("timecodeSource").string(timecode_source.as_str());
    }
    if let Some(timecode_start) = &input.timecode_start {
        object.key("timecodeStart").string(timecode_start.as_str());
    }
    if let Some(video_selector) = &input.video_selector {
        let mut w = object.key("videoSelector").start_object();
        crate::json_ser::serialize_structure_crate_model_video_selector(&mut w, video_selector)?;
        w.finish();
    }
    Ok(())
}
/// Writes the JSON form of an `AudioDescription` into `object`.
/// `audioType` is skipped at its zero default.
pub fn serialize_structure_crate_model_audio_description(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AudioDescription,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(tagging) = &input.audio_channel_tagging_settings {
        let mut w = object.key("audioChannelTaggingSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_audio_channel_tagging_settings(&mut w, tagging)?;
        w.finish();
    }
    if let Some(normalization) = &input.audio_normalization_settings {
        let mut w = object.key("audioNormalizationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_audio_normalization_settings(
            &mut w,
            normalization,
        )?;
        w.finish();
    }
    if let Some(source_name) = &input.audio_source_name {
        object.key("audioSourceName").string(source_name.as_str());
    }
    if input.audio_type != 0 {
        object.key("audioType").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.audio_type).into()),
        );
    }
    if let Some(type_control) = &input.audio_type_control {
        object.key("audioTypeControl").string(type_control.as_str());
    }
    if let Some(codec_settings) = &input.codec_settings {
        let mut w = object.key("codecSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_audio_codec_settings(&mut w, codec_settings)?;
        w.finish();
    }
    if let Some(custom_language) = &input.custom_language_code {
        object.key("customLanguageCode").string(custom_language.as_str());
    }
    if let Some(language_code) = &input.language_code {
        object.key("languageCode").string(language_code.as_str());
    }
    if let Some(code_control) = &input.language_code_control {
        object.key("languageCodeControl").string(code_control.as_str());
    }
    if let Some(remix) = &input.remix_settings {
        let mut w = object.key("remixSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_remix_settings(&mut w, remix)?;
        w.finish();
    }
    if let Some(stream_name) = &input.stream_name {
        object.key("streamName").string(stream_name.as_str());
    }
    Ok(())
}
/// Writes the JSON form of a `CaptionDescriptionPreset` into `object`.
pub fn serialize_structure_crate_model_caption_description_preset(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CaptionDescriptionPreset,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(custom_language) = &input.custom_language_code {
        object.key("customLanguageCode").string(custom_language.as_str());
    }
    if let Some(destination) = &input.destination_settings {
        let mut w = object.key("destinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_caption_destination_settings(&mut w, destination)?;
        w.finish();
    }
    if let Some(language_code) = &input.language_code {
        object.key("languageCode").string(language_code.as_str());
    }
    if let Some(description) = &input.language_description {
        object.key("languageDescription").string(description.as_str());
    }
    Ok(())
}
/// Writes the JSON form of a `ContainerSettings` into `object`: the
/// container type string plus one optional nested settings object per
/// supported container format.
pub fn serialize_structure_crate_model_container_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ContainerSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(cmfc) = &input.cmfc_settings {
        let mut w = object.key("cmfcSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_cmfc_settings(&mut w, cmfc)?;
        w.finish();
    }
    if let Some(container) = &input.container {
        object.key("container").string(container.as_str());
    }
    if let Some(f4v) = &input.f4v_settings {
        let mut w = object.key("f4vSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_f4v_settings(&mut w, f4v)?;
        w.finish();
    }
    if let Some(m2ts) = &input.m2ts_settings {
        let mut w = object.key("m2tsSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_m2ts_settings(&mut w, m2ts)?;
        w.finish();
    }
    if let Some(m3u8) = &input.m3u8_settings {
        let mut w = object.key("m3u8Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_m3u8_settings(&mut w, m3u8)?;
        w.finish();
    }
    if let Some(mov) = &input.mov_settings {
        let mut w = object.key("movSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_mov_settings(&mut w, mov)?;
        w.finish();
    }
    if let Some(mp4) = &input.mp4_settings {
        let mut w = object.key("mp4Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_mp4_settings(&mut w, mp4)?;
        w.finish();
    }
    if let Some(mpd) = &input.mpd_settings {
        let mut w = object.key("mpdSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_mpd_settings(&mut w, mpd)?;
        w.finish();
    }
    if let Some(mxf) = &input.mxf_settings {
        let mut w = object.key("mxfSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_mxf_settings(&mut w, mxf)?;
        w.finish();
    }
    Ok(())
}
/// Writes the JSON form of a `VideoDescription` into `object`.
/// Integer fields (`fixedAfd`, `height`, `sharpness`, `width`) are
/// skipped at their zero defaults.
pub fn serialize_structure_crate_model_video_description(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::VideoDescription,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(afd_signaling) = &input.afd_signaling {
        object.key("afdSignaling").string(afd_signaling.as_str());
    }
    if let Some(anti_alias) = &input.anti_alias {
        object.key("antiAlias").string(anti_alias.as_str());
    }
    if let Some(codec_settings) = &input.codec_settings {
        let mut w = object.key("codecSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_video_codec_settings(&mut w, codec_settings)?;
        w.finish();
    }
    if let Some(color_metadata) = &input.color_metadata {
        object.key("colorMetadata").string(color_metadata.as_str());
    }
    if let Some(crop) = &input.crop {
        let mut w = object.key("crop").start_object();
        crate::json_ser::serialize_structure_crate_model_rectangle(&mut w, crop)?;
        w.finish();
    }
    if let Some(drop_frame) = &input.drop_frame_timecode {
        object.key("dropFrameTimecode").string(drop_frame.as_str());
    }
    if input.fixed_afd != 0 {
        object.key("fixedAfd").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.fixed_afd).into()),
        );
    }
    if input.height != 0 {
        object.key("height").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.height).into()),
        );
    }
    if let Some(position) = &input.position {
        let mut w = object.key("position").start_object();
        crate::json_ser::serialize_structure_crate_model_rectangle(&mut w, position)?;
        w.finish();
    }
    if let Some(respond_to_afd) = &input.respond_to_afd {
        object.key("respondToAfd").string(respond_to_afd.as_str());
    }
    if let Some(scaling_behavior) = &input.scaling_behavior {
        object.key("scalingBehavior").string(scaling_behavior.as_str());
    }
    if input.sharpness != 0 {
        object.key("sharpness").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.sharpness).into()),
        );
    }
    if let Some(timecode_insertion) = &input.timecode_insertion {
        object.key("timecodeInsertion").string(timecode_insertion.as_str());
    }
    if let Some(preprocessors) = &input.video_preprocessors {
        let mut w = object.key("videoPreprocessors").start_object();
        crate::json_ser::serialize_structure_crate_model_video_preprocessor(&mut w, preprocessors)?;
        w.finish();
    }
    if input.width != 0 {
        object.key("width").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.width).into()),
        );
    }
    Ok(())
}
/// Writes the JSON form of an `EsamManifestConfirmConditionNotification`
/// into `object` (a single optional XML-payload string).
pub fn serialize_structure_crate_model_esam_manifest_confirm_condition_notification(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::EsamManifestConfirmConditionNotification,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(mcc_xml) = &input.mcc_xml {
        object.key("mccXml").string(mcc_xml.as_str());
    }
    Ok(())
}
/// Writes the JSON form of an `EsamSignalProcessingNotification`
/// into `object` (a single optional XML-payload string).
pub fn serialize_structure_crate_model_esam_signal_processing_notification(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::EsamSignalProcessingNotification,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(scc_xml) = &input.scc_xml {
        object.key("sccXml").string(scc_xml.as_str());
    }
    Ok(())
}
/// Writes the JSON form of an `AudioSelectorGroup` into `object`
/// (an optional array of selector-name strings).
pub fn serialize_structure_crate_model_audio_selector_group(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AudioSelectorGroup,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(names) = &input.audio_selector_names {
        let mut arr = object.key("audioSelectorNames").start_array();
        for name in names {
            arr.value().string(name.as_str());
        }
        arr.finish();
    }
    Ok(())
}
/// Writes the JSON form of an `AudioSelector` into `object`.
/// `offset` and `programSelection` are skipped at their zero defaults;
/// `pids` and `tracks` are arrays of integers.
pub fn serialize_structure_crate_model_audio_selector(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AudioSelector,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(custom_language) = &input.custom_language_code {
        object.key("customLanguageCode").string(custom_language.as_str());
    }
    if let Some(default_selection) = &input.default_selection {
        object.key("defaultSelection").string(default_selection.as_str());
    }
    if let Some(external_input) = &input.external_audio_file_input {
        object.key("externalAudioFileInput").string(external_input.as_str());
    }
    if let Some(rendition_group) = &input.hls_rendition_group_settings {
        let mut w = object.key("hlsRenditionGroupSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_hls_rendition_group_settings(
            &mut w,
            rendition_group,
        )?;
        w.finish();
    }
    if let Some(language_code) = &input.language_code {
        object.key("languageCode").string(language_code.as_str());
    }
    if input.offset != 0 {
        object.key("offset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.offset).into()),
        );
    }
    if let Some(pids) = &input.pids {
        let mut arr = object.key("pids").start_array();
        for pid in pids {
            arr.value().number(
                #[allow(clippy::useless_conversion)]
                aws_smithy_types::Number::NegInt((*pid).into()),
            );
        }
        arr.finish();
    }
    if input.program_selection != 0 {
        object.key("programSelection").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.program_selection).into()),
        );
    }
    if let Some(remix) = &input.remix_settings {
        let mut w = object.key("remixSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_remix_settings(&mut w, remix)?;
        w.finish();
    }
    if let Some(selector_type) = &input.selector_type {
        object.key("selectorType").string(selector_type.as_str());
    }
    if let Some(tracks) = &input.tracks {
        let mut arr = object.key("tracks").start_array();
        for track in tracks {
            arr.value().number(
                #[allow(clippy::useless_conversion)]
                aws_smithy_types::Number::NegInt((*track).into()),
            );
        }
        arr.finish();
    }
    Ok(())
}
/// Serializes `crate::model::CaptionSelector` into `object`.
///
/// Writes `customLanguageCode`, `languageCode`, and the nested
/// `sourceSettings` object only when the corresponding field is `Some`.
pub fn serialize_structure_crate_model_caption_selector(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CaptionSelector,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(custom_code) = &input.custom_language_code {
        object.key("customLanguageCode").string(custom_code.as_str());
    }
    if let Some(language) = &input.language_code {
        object.key("languageCode").string(language.as_str());
    }
    if let Some(source) = &input.source_settings {
        let mut nested = object.key("sourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_caption_source_settings(
            &mut nested,
            source,
        )?;
        nested.finish();
    }
    Ok(())
}
/// Serializes `crate::model::Rectangle` into `object`.
///
/// Each dimension/coordinate is written only when non-zero; zero is treated
/// as "unset" and omitted, exactly as in the field-by-field form.
pub fn serialize_structure_crate_model_rectangle(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Rectangle,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // Keys emitted in the same order as the original serializer.
    let fields = [
        ("height", input.height),
        ("width", input.width),
        ("x", input.x),
        ("y", input.y),
    ];
    for (key, value) in fields {
        if value != 0 {
            #[allow(clippy::useless_conversion)]
            object
                .key(key)
                .number(aws_smithy_types::Number::NegInt(value.into()));
        }
    }
    Ok(())
}
/// Serializes `crate::model::InputDecryptionSettings` into `object`.
///
/// All four fields are optional; each key is emitted only when set.
pub fn serialize_structure_crate_model_input_decryption_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::InputDecryptionSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(mode) = &input.decryption_mode {
        object.key("decryptionMode").string(mode.as_str());
    }
    if let Some(encrypted_key) = &input.encrypted_decryption_key {
        object
            .key("encryptedDecryptionKey")
            .string(encrypted_key.as_str());
    }
    if let Some(iv) = &input.initialization_vector {
        object.key("initializationVector").string(iv.as_str());
    }
    if let Some(region) = &input.kms_key_region {
        object.key("kmsKeyRegion").string(region.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::ImageInserter` into `object`.
///
/// When `insertable_images` is set, writes `insertableImages` as a JSON array
/// of objects, delegating each element to the `InsertableImage` serializer.
pub fn serialize_structure_crate_model_image_inserter(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ImageInserter,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(images) = &input.insertable_images {
        let mut images_array = object.key("insertableImages").start_array();
        for image in images {
            let mut image_object = images_array.value().start_object();
            crate::json_ser::serialize_structure_crate_model_insertable_image(
                &mut image_object,
                image,
            )?;
            image_object.finish();
        }
        images_array.finish();
    }
    Ok(())
}
/// Serializes `crate::model::InputClipping` into `object`.
///
/// Writes `endTimecode` and `startTimecode` only when set.
pub fn serialize_structure_crate_model_input_clipping(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::InputClipping,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(end) = &input.end_timecode {
        object.key("endTimecode").string(end.as_str());
    }
    if let Some(start) = &input.start_timecode {
        object.key("startTimecode").string(start.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::InputVideoGenerator` into `object`.
///
/// Writes `duration` only when non-zero (zero means "unset").
pub fn serialize_structure_crate_model_input_video_generator(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::InputVideoGenerator,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.duration != 0 {
        #[allow(clippy::useless_conversion)]
        object
            .key("duration")
            .number(aws_smithy_types::Number::NegInt(input.duration.into()));
    }
    Ok(())
}
/// Serializes `crate::model::VideoSelector` into the given JSON object writer.
///
/// Only "set" fields are emitted: `Option` fields when `Some`, and the plain
/// integer fields `pid` / `program_number` when non-zero. The nested
/// `hdr10Metadata` object is delegated to its own serializer and closed with
/// `finish()`.
pub fn serialize_structure_crate_model_video_selector(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::VideoSelector,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_382) = &input.alpha_behavior {
        object.key("alphaBehavior").string(var_382.as_str());
    }
    if let Some(var_383) = &input.color_space {
        object.key("colorSpace").string(var_383.as_str());
    }
    if let Some(var_384) = &input.color_space_usage {
        object.key("colorSpaceUsage").string(var_384.as_str());
    }
    if let Some(var_385) = &input.embedded_timecode_override {
        object
            .key("embeddedTimecodeOverride")
            .string(var_385.as_str());
    }
    if let Some(var_386) = &input.hdr10_metadata {
        let mut object_387 = object.key("hdr10Metadata").start_object();
        crate::json_ser::serialize_structure_crate_model_hdr10_metadata(&mut object_387, var_386)?;
        object_387.finish();
    }
    if let Some(var_388) = &input.pad_video {
        object.key("padVideo").string(var_388.as_str());
    }
    // 0 is treated as "unset" for the numeric fields below and skipped.
    if input.pid != 0 {
        object.key("pid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pid).into()),
        );
    }
    if input.program_number != 0 {
        object.key("programNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.program_number).into()),
        );
    }
    if let Some(var_389) = &input.rotate {
        object.key("rotate").string(var_389.as_str());
    }
    if let Some(var_390) = &input.sample_range {
        object.key("sampleRange").string(var_390.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::MotionImageInsertionFramerate` into `object`.
///
/// Writes `framerateDenominator` and `framerateNumerator` only when non-zero,
/// preserving the original key order.
pub fn serialize_structure_crate_model_motion_image_insertion_framerate(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MotionImageInsertionFramerate,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let fields = [
        ("framerateDenominator", input.framerate_denominator),
        ("framerateNumerator", input.framerate_numerator),
    ];
    for (key, value) in fields {
        if value != 0 {
            #[allow(clippy::useless_conversion)]
            object
                .key(key)
                .number(aws_smithy_types::Number::NegInt(value.into()));
        }
    }
    Ok(())
}
/// Serializes `crate::model::MotionImageInsertionOffset` into `object`.
///
/// Writes `imageX` and `imageY` only when non-zero, preserving the original
/// key order.
pub fn serialize_structure_crate_model_motion_image_insertion_offset(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MotionImageInsertionOffset,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let fields = [("imageX", input.image_x), ("imageY", input.image_y)];
    for (key, value) in fields {
        if value != 0 {
            #[allow(clippy::useless_conversion)]
            object
                .key(key)
                .number(aws_smithy_types::Number::NegInt(value.into()));
        }
    }
    Ok(())
}
/// Serializes `crate::model::AutomatedEncodingSettings` into `object`.
///
/// When `abr_settings` is set, writes it as the nested `abrSettings` object.
pub fn serialize_structure_crate_model_automated_encoding_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AutomatedEncodingSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(abr) = &input.abr_settings {
        let mut abr_object = object.key("abrSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_automated_abr_settings(
            &mut abr_object,
            abr,
        )?;
        abr_object.finish();
    }
    Ok(())
}
/// Serializes `crate::model::OutputGroupSettings` into the given JSON object
/// writer.
///
/// Each of the five group-settings variants is an independent `Option` and is
/// written as its own nested object when `Some`; `type` (a raw identifier
/// `r#type` in Rust) is written as a plain string.
pub fn serialize_structure_crate_model_output_group_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::OutputGroupSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_393) = &input.cmaf_group_settings {
        let mut object_394 = object.key("cmafGroupSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_cmaf_group_settings(
            &mut object_394,
            var_393,
        )?;
        object_394.finish();
    }
    if let Some(var_395) = &input.dash_iso_group_settings {
        let mut object_396 = object.key("dashIsoGroupSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_dash_iso_group_settings(
            &mut object_396,
            var_395,
        )?;
        object_396.finish();
    }
    if let Some(var_397) = &input.file_group_settings {
        let mut object_398 = object.key("fileGroupSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_file_group_settings(
            &mut object_398,
            var_397,
        )?;
        object_398.finish();
    }
    if let Some(var_399) = &input.hls_group_settings {
        let mut object_400 = object.key("hlsGroupSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_hls_group_settings(
            &mut object_400,
            var_399,
        )?;
        object_400.finish();
    }
    if let Some(var_401) = &input.ms_smooth_group_settings {
        let mut object_402 = object.key("msSmoothGroupSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_ms_smooth_group_settings(
            &mut object_402,
            var_401,
        )?;
        object_402.finish();
    }
    if let Some(var_403) = &input.r#type {
        object.key("type").string(var_403.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::Output` into the given JSON object writer.
///
/// Array fields (`audioDescriptions`, `captionDescriptions`) are written as
/// JSON arrays of objects, delegating each element to its structure
/// serializer; nested structures and strings are written only when `Some`.
pub fn serialize_structure_crate_model_output(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Output,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_404) = &input.audio_descriptions {
        let mut array_405 = object.key("audioDescriptions").start_array();
        for item_406 in var_404 {
            {
                let mut object_407 = array_405.value().start_object();
                crate::json_ser::serialize_structure_crate_model_audio_description(
                    &mut object_407,
                    item_406,
                )?;
                object_407.finish();
            }
        }
        array_405.finish();
    }
    if let Some(var_408) = &input.caption_descriptions {
        let mut array_409 = object.key("captionDescriptions").start_array();
        for item_410 in var_408 {
            {
                let mut object_411 = array_409.value().start_object();
                crate::json_ser::serialize_structure_crate_model_caption_description(
                    &mut object_411,
                    item_410,
                )?;
                object_411.finish();
            }
        }
        array_409.finish();
    }
    if let Some(var_412) = &input.container_settings {
        let mut object_413 = object.key("containerSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_container_settings(
            &mut object_413,
            var_412,
        )?;
        object_413.finish();
    }
    if let Some(var_414) = &input.extension {
        object.key("extension").string(var_414.as_str());
    }
    if let Some(var_415) = &input.name_modifier {
        object.key("nameModifier").string(var_415.as_str());
    }
    if let Some(var_416) = &input.output_settings {
        let mut object_417 = object.key("outputSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_output_settings(&mut object_417, var_416)?;
        object_417.finish();
    }
    if let Some(var_418) = &input.preset {
        object.key("preset").string(var_418.as_str());
    }
    if let Some(var_419) = &input.video_description {
        let mut object_420 = object.key("videoDescription").start_object();
        crate::json_ser::serialize_structure_crate_model_video_description(
            &mut object_420,
            var_419,
        )?;
        object_420.finish();
    }
    Ok(())
}
/// Serializes `crate::model::Id3Insertion` into `object`.
///
/// Writes `id3` and `timecode` only when set.
pub fn serialize_structure_crate_model_id3_insertion(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Id3Insertion,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(id3) = &input.id3 {
        object.key("id3").string(id3.as_str());
    }
    if let Some(timecode) = &input.timecode {
        object.key("timecode").string(timecode.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::AudioChannelTaggingSettings` into `object`.
///
/// Writes `channelTag` only when set.
pub fn serialize_structure_crate_model_audio_channel_tagging_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AudioChannelTaggingSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(tag) = &input.channel_tag {
        object.key("channelTag").string(tag.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::AudioNormalizationSettings` into `object`.
///
/// Optional string fields are written when `Some`; `correctionGateLevel` is
/// written when non-zero and `targetLkfs` when non-zero (0 / 0.0 means
/// "unset" and is omitted).
pub fn serialize_structure_crate_model_audio_normalization_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AudioNormalizationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(algorithm) = &input.algorithm {
        object.key("algorithm").string(algorithm.as_str());
    }
    if let Some(control) = &input.algorithm_control {
        object.key("algorithmControl").string(control.as_str());
    }
    if input.correction_gate_level != 0 {
        #[allow(clippy::useless_conversion)]
        object.key("correctionGateLevel").number(
            aws_smithy_types::Number::NegInt(input.correction_gate_level.into()),
        );
    }
    if let Some(logging) = &input.loudness_logging {
        object.key("loudnessLogging").string(logging.as_str());
    }
    if let Some(peak) = &input.peak_calculation {
        object.key("peakCalculation").string(peak.as_str());
    }
    if input.target_lkfs != 0.0 {
        #[allow(clippy::useless_conversion)]
        object
            .key("targetLkfs")
            .number(aws_smithy_types::Number::Float(input.target_lkfs.into()));
    }
    Ok(())
}
/// Serializes `crate::model::AudioCodecSettings` into the given JSON object
/// writer.
///
/// One nested settings object per codec family (AAC, AC3, AIFF, EAC3-Atmos,
/// EAC3, MP2, MP3, Opus, Vorbis, WAV), each emitted only when `Some`, plus the
/// `codec` selector string. Each nested object is delegated to its own
/// structure serializer and closed with `finish()`.
pub fn serialize_structure_crate_model_audio_codec_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AudioCodecSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_428) = &input.aac_settings {
        let mut object_429 = object.key("aacSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_aac_settings(&mut object_429, var_428)?;
        object_429.finish();
    }
    if let Some(var_430) = &input.ac3_settings {
        let mut object_431 = object.key("ac3Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_ac3_settings(&mut object_431, var_430)?;
        object_431.finish();
    }
    if let Some(var_432) = &input.aiff_settings {
        let mut object_433 = object.key("aiffSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_aiff_settings(&mut object_433, var_432)?;
        object_433.finish();
    }
    if let Some(var_434) = &input.codec {
        object.key("codec").string(var_434.as_str());
    }
    if let Some(var_435) = &input.eac3_atmos_settings {
        let mut object_436 = object.key("eac3AtmosSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_eac3_atmos_settings(
            &mut object_436,
            var_435,
        )?;
        object_436.finish();
    }
    if let Some(var_437) = &input.eac3_settings {
        let mut object_438 = object.key("eac3Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_eac3_settings(&mut object_438, var_437)?;
        object_438.finish();
    }
    if let Some(var_439) = &input.mp2_settings {
        let mut object_440 = object.key("mp2Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_mp2_settings(&mut object_440, var_439)?;
        object_440.finish();
    }
    if let Some(var_441) = &input.mp3_settings {
        let mut object_442 = object.key("mp3Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_mp3_settings(&mut object_442, var_441)?;
        object_442.finish();
    }
    if let Some(var_443) = &input.opus_settings {
        let mut object_444 = object.key("opusSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_opus_settings(&mut object_444, var_443)?;
        object_444.finish();
    }
    if let Some(var_445) = &input.vorbis_settings {
        let mut object_446 = object.key("vorbisSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_vorbis_settings(&mut object_446, var_445)?;
        object_446.finish();
    }
    if let Some(var_447) = &input.wav_settings {
        let mut object_448 = object.key("wavSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_wav_settings(&mut object_448, var_447)?;
        object_448.finish();
    }
    Ok(())
}
/// Serializes `crate::model::RemixSettings` into `object`.
///
/// Writes the nested `channelMapping` object when set, then `channelsIn` and
/// `channelsOut` when non-zero (zero means "unset").
pub fn serialize_structure_crate_model_remix_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::RemixSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(mapping) = &input.channel_mapping {
        let mut mapping_object = object.key("channelMapping").start_object();
        crate::json_ser::serialize_structure_crate_model_channel_mapping(
            &mut mapping_object,
            mapping,
        )?;
        mapping_object.finish();
    }
    let counts = [
        ("channelsIn", input.channels_in),
        ("channelsOut", input.channels_out),
    ];
    for (key, value) in counts {
        if value != 0 {
            #[allow(clippy::useless_conversion)]
            object
                .key(key)
                .number(aws_smithy_types::Number::NegInt(value.into()));
        }
    }
    Ok(())
}
/// Serializes `crate::model::CaptionDestinationSettings` into the given JSON
/// object writer.
///
/// One nested settings object per caption destination format (burn-in, DVB
/// sub, embedded, IMSC, SCC, SRT, teletext, TTML, WebVTT), each emitted only
/// when `Some`, plus the `destinationType` selector string.
pub fn serialize_structure_crate_model_caption_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CaptionDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_451) = &input.burnin_destination_settings {
        let mut object_452 = object.key("burninDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_burnin_destination_settings(
            &mut object_452,
            var_451,
        )?;
        object_452.finish();
    }
    if let Some(var_453) = &input.destination_type {
        object.key("destinationType").string(var_453.as_str());
    }
    if let Some(var_454) = &input.dvb_sub_destination_settings {
        let mut object_455 = object.key("dvbSubDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_dvb_sub_destination_settings(
            &mut object_455,
            var_454,
        )?;
        object_455.finish();
    }
    if let Some(var_456) = &input.embedded_destination_settings {
        let mut object_457 = object.key("embeddedDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_embedded_destination_settings(
            &mut object_457,
            var_456,
        )?;
        object_457.finish();
    }
    if let Some(var_458) = &input.imsc_destination_settings {
        let mut object_459 = object.key("imscDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_imsc_destination_settings(
            &mut object_459,
            var_458,
        )?;
        object_459.finish();
    }
    if let Some(var_460) = &input.scc_destination_settings {
        let mut object_461 = object.key("sccDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_scc_destination_settings(
            &mut object_461,
            var_460,
        )?;
        object_461.finish();
    }
    if let Some(var_462) = &input.srt_destination_settings {
        let mut object_463 = object.key("srtDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_srt_destination_settings(
            &mut object_463,
            var_462,
        )?;
        object_463.finish();
    }
    if let Some(var_464) = &input.teletext_destination_settings {
        let mut object_465 = object.key("teletextDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_teletext_destination_settings(
            &mut object_465,
            var_464,
        )?;
        object_465.finish();
    }
    if let Some(var_466) = &input.ttml_destination_settings {
        let mut object_467 = object.key("ttmlDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_ttml_destination_settings(
            &mut object_467,
            var_466,
        )?;
        object_467.finish();
    }
    if let Some(var_468) = &input.webvtt_destination_settings {
        let mut object_469 = object.key("webvttDestinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_webvtt_destination_settings(
            &mut object_469,
            var_468,
        )?;
        object_469.finish();
    }
    Ok(())
}
/// Serializes `crate::model::CmfcSettings` into `object`.
///
/// All fields are optional strings/enums; each JSON key is written only when
/// the matching field is `Some`.
pub fn serialize_structure_crate_model_cmfc_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CmfcSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(audio_duration) = &input.audio_duration {
        object.key("audioDuration").string(audio_duration.as_str());
    }
    if let Some(group_id) = &input.audio_group_id {
        object.key("audioGroupId").string(group_id.as_str());
    }
    if let Some(rendition_sets) = &input.audio_rendition_sets {
        object.key("audioRenditionSets").string(rendition_sets.as_str());
    }
    if let Some(track_type) = &input.audio_track_type {
        object.key("audioTrackType").string(track_type.as_str());
    }
    if let Some(dvs_flag) = &input.descriptive_video_service_flag {
        object
            .key("descriptiveVideoServiceFlag")
            .string(dvs_flag.as_str());
    }
    if let Some(i_frame_only) = &input.i_frame_only_manifest {
        object.key("iFrameOnlyManifest").string(i_frame_only.as_str());
    }
    if let Some(klv) = &input.klv_metadata {
        object.key("klvMetadata").string(klv.as_str());
    }
    if let Some(esam) = &input.scte35_esam {
        object.key("scte35Esam").string(esam.as_str());
    }
    if let Some(scte_source) = &input.scte35_source {
        object.key("scte35Source").string(scte_source.as_str());
    }
    if let Some(timed_metadata) = &input.timed_metadata {
        object.key("timedMetadata").string(timed_metadata.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::F4vSettings` into `object`.
///
/// Writes `moovPlacement` only when set.
pub fn serialize_structure_crate_model_f4v_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::F4vSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(placement) = &input.moov_placement {
        object.key("moovPlacement").string(placement.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::M2tsSettings` into the given JSON object writer.
///
/// The largest container serializer in this module (~40 fields). Only "set"
/// fields are emitted: `Option` fields when `Some`, integer fields when
/// non-zero, and float fields (`fragmentTime`, `nullPacketBitrate`,
/// `segmentationTime`) when non-zero. PID list fields (`audioPids`,
/// `dvbSubPids`) are written as JSON arrays of numbers; DVB NIT/SDT/TDT and
/// `scte35Esam` are delegated to their nested-structure serializers. Key
/// spelling (including the unusual `dataPTSControl`) is the wire contract —
/// do not "fix" the casing.
pub fn serialize_structure_crate_model_m2ts_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::M2tsSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_481) = &input.audio_buffer_model {
        object.key("audioBufferModel").string(var_481.as_str());
    }
    if let Some(var_482) = &input.audio_duration {
        object.key("audioDuration").string(var_482.as_str());
    }
    if input.audio_frames_per_pes != 0 {
        object.key("audioFramesPerPes").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.audio_frames_per_pes).into()),
        );
    }
    if let Some(var_483) = &input.audio_pids {
        let mut array_484 = object.key("audioPids").start_array();
        for item_485 in var_483 {
            {
                array_484.value().number(
                    #[allow(clippy::useless_conversion)]
                    aws_smithy_types::Number::NegInt((*item_485).into()),
                );
            }
        }
        array_484.finish();
    }
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_486) = &input.buffer_model {
        object.key("bufferModel").string(var_486.as_str());
    }
    if let Some(var_487) = &input.data_pts_control {
        object.key("dataPTSControl").string(var_487.as_str());
    }
    if let Some(var_488) = &input.dvb_nit_settings {
        let mut object_489 = object.key("dvbNitSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_dvb_nit_settings(
            &mut object_489,
            var_488,
        )?;
        object_489.finish();
    }
    if let Some(var_490) = &input.dvb_sdt_settings {
        let mut object_491 = object.key("dvbSdtSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_dvb_sdt_settings(
            &mut object_491,
            var_490,
        )?;
        object_491.finish();
    }
    if let Some(var_492) = &input.dvb_sub_pids {
        let mut array_493 = object.key("dvbSubPids").start_array();
        for item_494 in var_492 {
            {
                array_493.value().number(
                    #[allow(clippy::useless_conversion)]
                    aws_smithy_types::Number::NegInt((*item_494).into()),
                );
            }
        }
        array_493.finish();
    }
    if let Some(var_495) = &input.dvb_tdt_settings {
        let mut object_496 = object.key("dvbTdtSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_dvb_tdt_settings(
            &mut object_496,
            var_495,
        )?;
        object_496.finish();
    }
    if input.dvb_teletext_pid != 0 {
        object.key("dvbTeletextPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.dvb_teletext_pid).into()),
        );
    }
    if let Some(var_497) = &input.ebp_audio_interval {
        object.key("ebpAudioInterval").string(var_497.as_str());
    }
    if let Some(var_498) = &input.ebp_placement {
        object.key("ebpPlacement").string(var_498.as_str());
    }
    if let Some(var_499) = &input.es_rate_in_pes {
        object.key("esRateInPes").string(var_499.as_str());
    }
    if let Some(var_500) = &input.force_ts_video_ebp_order {
        object.key("forceTsVideoEbpOrder").string(var_500.as_str());
    }
    // Float fields use Number::Float; 0.0 is treated as "unset" and skipped.
    if input.fragment_time != 0.0 {
        object.key("fragmentTime").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.fragment_time).into()),
        );
    }
    if let Some(var_501) = &input.klv_metadata {
        object.key("klvMetadata").string(var_501.as_str());
    }
    if input.max_pcr_interval != 0 {
        object.key("maxPcrInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_pcr_interval).into()),
        );
    }
    if input.min_ebp_interval != 0 {
        object.key("minEbpInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_ebp_interval).into()),
        );
    }
    if let Some(var_502) = &input.nielsen_id3 {
        object.key("nielsenId3").string(var_502.as_str());
    }
    if input.null_packet_bitrate != 0.0 {
        object.key("nullPacketBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.null_packet_bitrate).into()),
        );
    }
    if input.pat_interval != 0 {
        object.key("patInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pat_interval).into()),
        );
    }
    if let Some(var_503) = &input.pcr_control {
        object.key("pcrControl").string(var_503.as_str());
    }
    if input.pcr_pid != 0 {
        object.key("pcrPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pcr_pid).into()),
        );
    }
    if input.pmt_interval != 0 {
        object.key("pmtInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pmt_interval).into()),
        );
    }
    if input.pmt_pid != 0 {
        object.key("pmtPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pmt_pid).into()),
        );
    }
    if input.private_metadata_pid != 0 {
        object.key("privateMetadataPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.private_metadata_pid).into()),
        );
    }
    if input.program_number != 0 {
        object.key("programNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.program_number).into()),
        );
    }
    if let Some(var_504) = &input.rate_mode {
        object.key("rateMode").string(var_504.as_str());
    }
    // Unlike the string "scte35Esam" fields in other containers, M2TS carries
    // a nested structure here.
    if let Some(var_505) = &input.scte35_esam {
        let mut object_506 = object.key("scte35Esam").start_object();
        crate::json_ser::serialize_structure_crate_model_m2ts_scte35_esam(
            &mut object_506,
            var_505,
        )?;
        object_506.finish();
    }
    if input.scte35_pid != 0 {
        object.key("scte35Pid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.scte35_pid).into()),
        );
    }
    if let Some(var_507) = &input.scte35_source {
        object.key("scte35Source").string(var_507.as_str());
    }
    if let Some(var_508) = &input.segmentation_markers {
        object.key("segmentationMarkers").string(var_508.as_str());
    }
    if let Some(var_509) = &input.segmentation_style {
        object.key("segmentationStyle").string(var_509.as_str());
    }
    if input.segmentation_time != 0.0 {
        object.key("segmentationTime").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.segmentation_time).into()),
        );
    }
    if input.timed_metadata_pid != 0 {
        object.key("timedMetadataPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.timed_metadata_pid).into()),
        );
    }
    if input.transport_stream_id != 0 {
        object.key("transportStreamId").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.transport_stream_id).into()),
        );
    }
    if input.video_pid != 0 {
        object.key("videoPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.video_pid).into()),
        );
    }
    Ok(())
}
/// Serializes `crate::model::M3u8Settings` into the given JSON object writer.
///
/// Only "set" fields are emitted: `Option` fields when `Some`, integer fields
/// when non-zero. `audioPids` is written as a JSON array of numbers. Key
/// spelling (including `dataPTSControl`) is the wire contract.
pub fn serialize_structure_crate_model_m3u8_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::M3u8Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_510) = &input.audio_duration {
        object.key("audioDuration").string(var_510.as_str());
    }
    if input.audio_frames_per_pes != 0 {
        object.key("audioFramesPerPes").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.audio_frames_per_pes).into()),
        );
    }
    if let Some(var_511) = &input.audio_pids {
        let mut array_512 = object.key("audioPids").start_array();
        for item_513 in var_511 {
            {
                array_512.value().number(
                    #[allow(clippy::useless_conversion)]
                    aws_smithy_types::Number::NegInt((*item_513).into()),
                );
            }
        }
        array_512.finish();
    }
    if let Some(var_514) = &input.data_pts_control {
        object.key("dataPTSControl").string(var_514.as_str());
    }
    if input.max_pcr_interval != 0 {
        object.key("maxPcrInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_pcr_interval).into()),
        );
    }
    if let Some(var_515) = &input.nielsen_id3 {
        object.key("nielsenId3").string(var_515.as_str());
    }
    if input.pat_interval != 0 {
        object.key("patInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pat_interval).into()),
        );
    }
    if let Some(var_516) = &input.pcr_control {
        object.key("pcrControl").string(var_516.as_str());
    }
    if input.pcr_pid != 0 {
        object.key("pcrPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pcr_pid).into()),
        );
    }
    if input.pmt_interval != 0 {
        object.key("pmtInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pmt_interval).into()),
        );
    }
    if input.pmt_pid != 0 {
        object.key("pmtPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.pmt_pid).into()),
        );
    }
    if input.private_metadata_pid != 0 {
        object.key("privateMetadataPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.private_metadata_pid).into()),
        );
    }
    if input.program_number != 0 {
        object.key("programNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.program_number).into()),
        );
    }
    if input.scte35_pid != 0 {
        object.key("scte35Pid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.scte35_pid).into()),
        );
    }
    if let Some(var_517) = &input.scte35_source {
        object.key("scte35Source").string(var_517.as_str());
    }
    if let Some(var_518) = &input.timed_metadata {
        object.key("timedMetadata").string(var_518.as_str());
    }
    if input.timed_metadata_pid != 0 {
        object.key("timedMetadataPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.timed_metadata_pid).into()),
        );
    }
    if input.transport_stream_id != 0 {
        object.key("transportStreamId").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.transport_stream_id).into()),
        );
    }
    if input.video_pid != 0 {
        object.key("videoPid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.video_pid).into()),
        );
    }
    Ok(())
}
/// Serializes `crate::model::MovSettings` into `object`.
///
/// All five fields are optional; each key is emitted only when set. The
/// `mpeg2FourCCControl` key casing is the wire contract.
pub fn serialize_structure_crate_model_mov_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MovSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(clap_atom) = &input.clap_atom {
        object.key("clapAtom").string(clap_atom.as_str());
    }
    if let Some(cslg_atom) = &input.cslg_atom {
        object.key("cslgAtom").string(cslg_atom.as_str());
    }
    if let Some(four_cc) = &input.mpeg2_four_cc_control {
        object.key("mpeg2FourCCControl").string(four_cc.as_str());
    }
    if let Some(padding) = &input.padding_control {
        object.key("paddingControl").string(padding.as_str());
    }
    if let Some(reference) = &input.reference {
        object.key("reference").string(reference.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::Mp4Settings` into `object`.
///
/// Optional fields are written when `Some`; `cttsVersion` is written only
/// when non-zero (zero means "unset").
pub fn serialize_structure_crate_model_mp4_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Mp4Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(audio_duration) = &input.audio_duration {
        object.key("audioDuration").string(audio_duration.as_str());
    }
    if let Some(cslg_atom) = &input.cslg_atom {
        object.key("cslgAtom").string(cslg_atom.as_str());
    }
    if input.ctts_version != 0 {
        #[allow(clippy::useless_conversion)]
        object
            .key("cttsVersion")
            .number(aws_smithy_types::Number::NegInt(input.ctts_version.into()));
    }
    if let Some(free_space) = &input.free_space_box {
        object.key("freeSpaceBox").string(free_space.as_str());
    }
    if let Some(placement) = &input.moov_placement {
        object.key("moovPlacement").string(placement.as_str());
    }
    if let Some(major_brand) = &input.mp4_major_brand {
        object.key("mp4MajorBrand").string(major_brand.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::MpdSettings` into `object`.
///
/// All fields are optional; each JSON key is written only when the matching
/// field is `Some`.
pub fn serialize_structure_crate_model_mpd_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MpdSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(caption_hints) = &input.accessibility_caption_hints {
        object
            .key("accessibilityCaptionHints")
            .string(caption_hints.as_str());
    }
    if let Some(audio_duration) = &input.audio_duration {
        object.key("audioDuration").string(audio_duration.as_str());
    }
    if let Some(container_type) = &input.caption_container_type {
        object.key("captionContainerType").string(container_type.as_str());
    }
    if let Some(klv) = &input.klv_metadata {
        object.key("klvMetadata").string(klv.as_str());
    }
    if let Some(esam) = &input.scte35_esam {
        object.key("scte35Esam").string(esam.as_str());
    }
    if let Some(scte_source) = &input.scte35_source {
        object.key("scte35Source").string(scte_source.as_str());
    }
    if let Some(timed_metadata) = &input.timed_metadata {
        object.key("timedMetadata").string(timed_metadata.as_str());
    }
    Ok(())
}
/// Serializes `crate::model::MxfSettings` into `object`.
///
/// Writes `afdSignaling` and `profile` when set, plus the nested
/// `xavcProfileSettings` object when present.
pub fn serialize_structure_crate_model_mxf_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MxfSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(afd) = &input.afd_signaling {
        object.key("afdSignaling").string(afd.as_str());
    }
    if let Some(profile) = &input.profile {
        object.key("profile").string(profile.as_str());
    }
    if let Some(xavc) = &input.xavc_profile_settings {
        let mut xavc_object = object.key("xavcProfileSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_mxf_xavc_profile_settings(
            &mut xavc_object,
            xavc,
        )?;
        xavc_object.finish();
    }
    Ok(())
}
/// Serialize a `VideoCodecSettings` structure into `object`.
///
/// The structure is a union-like container: at most one of the per-codec
/// settings blocks is normally populated, plus the `codec` selector string.
/// Every `Some` member is written as a nested JSON object under its key.
pub fn serialize_structure_crate_model_video_codec_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::VideoCodecSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(av1) = &input.av1_settings {
        let mut av1_obj = object.key("av1Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_av1_settings(&mut av1_obj, av1)?;
        av1_obj.finish();
    }
    if let Some(avc_intra) = &input.avc_intra_settings {
        let mut avc_intra_obj = object.key("avcIntraSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_avc_intra_settings(
            &mut avc_intra_obj,
            avc_intra,
        )?;
        avc_intra_obj.finish();
    }
    if let Some(codec) = &input.codec {
        object.key("codec").string(codec.as_str());
    }
    if let Some(frame_capture) = &input.frame_capture_settings {
        let mut frame_capture_obj = object.key("frameCaptureSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_frame_capture_settings(
            &mut frame_capture_obj,
            frame_capture,
        )?;
        frame_capture_obj.finish();
    }
    if let Some(h264) = &input.h264_settings {
        let mut h264_obj = object.key("h264Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_h264_settings(&mut h264_obj, h264)?;
        h264_obj.finish();
    }
    if let Some(h265) = &input.h265_settings {
        let mut h265_obj = object.key("h265Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_h265_settings(&mut h265_obj, h265)?;
        h265_obj.finish();
    }
    if let Some(mpeg2) = &input.mpeg2_settings {
        let mut mpeg2_obj = object.key("mpeg2Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_mpeg2_settings(&mut mpeg2_obj, mpeg2)?;
        mpeg2_obj.finish();
    }
    if let Some(prores) = &input.prores_settings {
        let mut prores_obj = object.key("proresSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_prores_settings(&mut prores_obj, prores)?;
        prores_obj.finish();
    }
    if let Some(vc3) = &input.vc3_settings {
        let mut vc3_obj = object.key("vc3Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_vc3_settings(&mut vc3_obj, vc3)?;
        vc3_obj.finish();
    }
    if let Some(vp8) = &input.vp8_settings {
        let mut vp8_obj = object.key("vp8Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_vp8_settings(&mut vp8_obj, vp8)?;
        vp8_obj.finish();
    }
    if let Some(vp9) = &input.vp9_settings {
        let mut vp9_obj = object.key("vp9Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_vp9_settings(&mut vp9_obj, vp9)?;
        vp9_obj.finish();
    }
    if let Some(xavc) = &input.xavc_settings {
        let mut xavc_obj = object.key("xavcSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_xavc_settings(&mut xavc_obj, xavc)?;
        xavc_obj.finish();
    }
    Ok(())
}
/// Serialize a `VideoPreprocessor` structure into `object`.
///
/// Each configured preprocessor (color corrector, deinterlacer, etc.) is
/// written as a nested JSON object; absent members are omitted.
pub fn serialize_structure_crate_model_video_preprocessor(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::VideoPreprocessor,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(color_corrector) = &input.color_corrector {
        let mut cc_obj = object.key("colorCorrector").start_object();
        crate::json_ser::serialize_structure_crate_model_color_corrector(
            &mut cc_obj,
            color_corrector,
        )?;
        cc_obj.finish();
    }
    if let Some(deinterlacer) = &input.deinterlacer {
        let mut deint_obj = object.key("deinterlacer").start_object();
        crate::json_ser::serialize_structure_crate_model_deinterlacer(&mut deint_obj, deinterlacer)?;
        deint_obj.finish();
    }
    if let Some(dolby_vision) = &input.dolby_vision {
        let mut dv_obj = object.key("dolbyVision").start_object();
        crate::json_ser::serialize_structure_crate_model_dolby_vision(&mut dv_obj, dolby_vision)?;
        dv_obj.finish();
    }
    if let Some(hdr10_plus) = &input.hdr10_plus {
        let mut hdr_obj = object.key("hdr10Plus").start_object();
        crate::json_ser::serialize_structure_crate_model_hdr10_plus(&mut hdr_obj, hdr10_plus)?;
        hdr_obj.finish();
    }
    if let Some(image_inserter) = &input.image_inserter {
        let mut ii_obj = object.key("imageInserter").start_object();
        crate::json_ser::serialize_structure_crate_model_image_inserter(&mut ii_obj, image_inserter)?;
        ii_obj.finish();
    }
    if let Some(noise_reducer) = &input.noise_reducer {
        let mut nr_obj = object.key("noiseReducer").start_object();
        crate::json_ser::serialize_structure_crate_model_noise_reducer(&mut nr_obj, noise_reducer)?;
        nr_obj.finish();
    }
    if let Some(watermarking) = &input.partner_watermarking {
        let mut wm_obj = object.key("partnerWatermarking").start_object();
        crate::json_ser::serialize_structure_crate_model_partner_watermarking(
            &mut wm_obj,
            watermarking,
        )?;
        wm_obj.finish();
    }
    if let Some(timecode_burnin) = &input.timecode_burnin {
        let mut tb_obj = object.key("timecodeBurnin").start_object();
        crate::json_ser::serialize_structure_crate_model_timecode_burnin(
            &mut tb_obj,
            timecode_burnin,
        )?;
        tb_obj.finish();
    }
    Ok(())
}
/// Serialize a `HlsRenditionGroupSettings` structure into `object`;
/// unset members are skipped.
pub fn serialize_structure_crate_model_hls_rendition_group_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HlsRenditionGroupSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(group_id) = &input.rendition_group_id {
        object.key("renditionGroupId").string(group_id.as_str());
    }
    if let Some(language_code) = &input.rendition_language_code {
        object
            .key("renditionLanguageCode")
            .string(language_code.as_str());
    }
    if let Some(name) = &input.rendition_name {
        object.key("renditionName").string(name.as_str());
    }
    Ok(())
}
/// Serialize a `CaptionSourceSettings` structure into `object`.
///
/// Writes the per-source-type nested settings blocks that are present plus
/// the `sourceType` selector string; absent members are omitted.
pub fn serialize_structure_crate_model_caption_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CaptionSourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(ancillary) = &input.ancillary_source_settings {
        let mut ancillary_obj = object.key("ancillarySourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_ancillary_source_settings(
            &mut ancillary_obj,
            ancillary,
        )?;
        ancillary_obj.finish();
    }
    if let Some(dvb_sub) = &input.dvb_sub_source_settings {
        let mut dvb_sub_obj = object.key("dvbSubSourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_dvb_sub_source_settings(
            &mut dvb_sub_obj,
            dvb_sub,
        )?;
        dvb_sub_obj.finish();
    }
    if let Some(embedded) = &input.embedded_source_settings {
        let mut embedded_obj = object.key("embeddedSourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_embedded_source_settings(
            &mut embedded_obj,
            embedded,
        )?;
        embedded_obj.finish();
    }
    if let Some(file_source) = &input.file_source_settings {
        let mut file_obj = object.key("fileSourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_file_source_settings(
            &mut file_obj,
            file_source,
        )?;
        file_obj.finish();
    }
    if let Some(source_type) = &input.source_type {
        object.key("sourceType").string(source_type.as_str());
    }
    if let Some(teletext) = &input.teletext_source_settings {
        let mut teletext_obj = object.key("teletextSourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_teletext_source_settings(
            &mut teletext_obj,
            teletext,
        )?;
        teletext_obj.finish();
    }
    if let Some(track) = &input.track_source_settings {
        let mut track_obj = object.key("trackSourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_track_source_settings(
            &mut track_obj,
            track,
        )?;
        track_obj.finish();
    }
    if let Some(webvtt) = &input.webvtt_hls_source_settings {
        let mut webvtt_obj = object.key("webvttHlsSourceSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_webvtt_hls_source_settings(
            &mut webvtt_obj,
            webvtt,
        )?;
        webvtt_obj.finish();
    }
    Ok(())
}
/// Serialize an `InsertableImage` structure into `object`.
///
/// Integer members equal to zero (the generated default) are omitted,
/// matching the smithy codegen convention; string members are written
/// only when `Some`.
pub fn serialize_structure_crate_model_insertable_image(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::InsertableImage,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // Local helper: emit one integer member as a JSON number.
    fn put_int(
        object: &mut aws_smithy_json::serialize::JsonObjectWriter,
        key: &str,
        value: impl Into<i64>,
    ) {
        object
            .key(key)
            .number(aws_smithy_types::Number::NegInt(value.into()));
    }
    if input.duration != 0 {
        put_int(object, "duration", input.duration);
    }
    if input.fade_in != 0 {
        put_int(object, "fadeIn", input.fade_in);
    }
    if input.fade_out != 0 {
        put_int(object, "fadeOut", input.fade_out);
    }
    if input.height != 0 {
        put_int(object, "height", input.height);
    }
    if let Some(image_input) = &input.image_inserter_input {
        object.key("imageInserterInput").string(image_input.as_str());
    }
    if input.image_x != 0 {
        put_int(object, "imageX", input.image_x);
    }
    if input.image_y != 0 {
        put_int(object, "imageY", input.image_y);
    }
    if input.layer != 0 {
        put_int(object, "layer", input.layer);
    }
    if input.opacity != 0 {
        put_int(object, "opacity", input.opacity);
    }
    if let Some(start_time) = &input.start_time {
        object.key("startTime").string(start_time.as_str());
    }
    if input.width != 0 {
        put_int(object, "width", input.width);
    }
    Ok(())
}
/// Serialize an `Hdr10Metadata` structure into `object`.
///
/// All members are integers; values equal to zero (the generated default)
/// are omitted from the JSON document.
pub fn serialize_structure_crate_model_hdr10_metadata(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Hdr10Metadata,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // Local helper: emit one integer member as a JSON number.
    fn put_int(
        object: &mut aws_smithy_json::serialize::JsonObjectWriter,
        key: &str,
        value: impl Into<i64>,
    ) {
        object
            .key(key)
            .number(aws_smithy_types::Number::NegInt(value.into()));
    }
    if input.blue_primary_x != 0 {
        put_int(object, "bluePrimaryX", input.blue_primary_x);
    }
    if input.blue_primary_y != 0 {
        put_int(object, "bluePrimaryY", input.blue_primary_y);
    }
    if input.green_primary_x != 0 {
        put_int(object, "greenPrimaryX", input.green_primary_x);
    }
    if input.green_primary_y != 0 {
        put_int(object, "greenPrimaryY", input.green_primary_y);
    }
    if input.max_content_light_level != 0 {
        put_int(object, "maxContentLightLevel", input.max_content_light_level);
    }
    if input.max_frame_average_light_level != 0 {
        put_int(
            object,
            "maxFrameAverageLightLevel",
            input.max_frame_average_light_level,
        );
    }
    if input.max_luminance != 0 {
        put_int(object, "maxLuminance", input.max_luminance);
    }
    if input.min_luminance != 0 {
        put_int(object, "minLuminance", input.min_luminance);
    }
    if input.red_primary_x != 0 {
        put_int(object, "redPrimaryX", input.red_primary_x);
    }
    if input.red_primary_y != 0 {
        put_int(object, "redPrimaryY", input.red_primary_y);
    }
    if input.white_point_x != 0 {
        put_int(object, "whitePointX", input.white_point_x);
    }
    if input.white_point_y != 0 {
        put_int(object, "whitePointY", input.white_point_y);
    }
    Ok(())
}
/// Serialize an `AutomatedAbrSettings` structure into `object`.
///
/// Integer members equal to zero are omitted; the optional `rules` list is
/// written as a JSON array of nested rule objects.
pub fn serialize_structure_crate_model_automated_abr_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AutomatedAbrSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.max_abr_bitrate != 0 {
        object.key("maxAbrBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_abr_bitrate).into()),
        );
    }
    if input.max_renditions != 0 {
        object.key("maxRenditions").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_renditions).into()),
        );
    }
    if input.min_abr_bitrate != 0 {
        object.key("minAbrBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_abr_bitrate).into()),
        );
    }
    if let Some(rules) = &input.rules {
        let mut rules_array = object.key("rules").start_array();
        for rule in rules {
            let mut rule_obj = rules_array.value().start_object();
            crate::json_ser::serialize_structure_crate_model_automated_abr_rule(
                &mut rule_obj,
                rule,
            )?;
            rule_obj.finish();
        }
        rules_array.finish();
    }
    Ok(())
}
/// Serialize a `CmafGroupSettings` structure into `object`.
///
/// Generated serializer: `Option` members are written only when `Some`;
/// numeric members are written only when non-zero (the generated default).
/// Nested structures and the `additionalManifests` list delegate to their
/// own `serialize_structure_*` functions.
pub fn serialize_structure_crate_model_cmaf_group_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CmafGroupSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_603) = &input.additional_manifests {
        let mut array_604 = object.key("additionalManifests").start_array();
        for item_605 in var_603 {
            {
                let mut object_606 = array_604.value().start_object();
                crate::json_ser::serialize_structure_crate_model_cmaf_additional_manifest(
                    &mut object_606,
                    item_605,
                )?;
                object_606.finish();
            }
        }
        array_604.finish();
    }
    if let Some(var_607) = &input.base_url {
        object.key("baseUrl").string(var_607.as_str());
    }
    if let Some(var_608) = &input.client_cache {
        object.key("clientCache").string(var_608.as_str());
    }
    if let Some(var_609) = &input.codec_specification {
        object.key("codecSpecification").string(var_609.as_str());
    }
    if let Some(var_610) = &input.destination {
        object.key("destination").string(var_610.as_str());
    }
    if let Some(var_611) = &input.destination_settings {
        let mut object_612 = object.key("destinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_destination_settings(
            &mut object_612,
            var_611,
        )?;
        object_612.finish();
    }
    if let Some(var_613) = &input.encryption {
        let mut object_614 = object.key("encryption").start_object();
        crate::json_ser::serialize_structure_crate_model_cmaf_encryption_settings(
            &mut object_614,
            var_613,
        )?;
        object_614.finish();
    }
    if input.fragment_length != 0 {
        object.key("fragmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.fragment_length).into()),
        );
    }
    if let Some(var_615) = &input.image_based_trick_play {
        object.key("imageBasedTrickPlay").string(var_615.as_str());
    }
    if let Some(var_616) = &input.image_based_trick_play_settings {
        let mut object_617 = object.key("imageBasedTrickPlaySettings").start_object();
        crate::json_ser::serialize_structure_crate_model_cmaf_image_based_trick_play_settings(
            &mut object_617,
            var_616,
        )?;
        object_617.finish();
    }
    if let Some(var_618) = &input.manifest_compression {
        object.key("manifestCompression").string(var_618.as_str());
    }
    if let Some(var_619) = &input.manifest_duration_format {
        object
            .key("manifestDurationFormat")
            .string(var_619.as_str());
    }
    if input.min_buffer_time != 0 {
        object.key("minBufferTime").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_buffer_time).into()),
        );
    }
    // Float member: serialized as a JSON float, skipped when exactly 0.0.
    if input.min_final_segment_length != 0.0 {
        object.key("minFinalSegmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.min_final_segment_length).into()),
        );
    }
    if let Some(var_620) = &input.mpd_profile {
        object.key("mpdProfile").string(var_620.as_str());
    }
    if let Some(var_621) = &input.pts_offset_handling_for_b_frames {
        object
            .key("ptsOffsetHandlingForBFrames")
            .string(var_621.as_str());
    }
    if let Some(var_622) = &input.segment_control {
        object.key("segmentControl").string(var_622.as_str());
    }
    if input.segment_length != 0 {
        object.key("segmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.segment_length).into()),
        );
    }
    if let Some(var_623) = &input.segment_length_control {
        object.key("segmentLengthControl").string(var_623.as_str());
    }
    if let Some(var_624) = &input.stream_inf_resolution {
        object.key("streamInfResolution").string(var_624.as_str());
    }
    if let Some(var_625) = &input.target_duration_compatibility_mode {
        object
            .key("targetDurationCompatibilityMode")
            .string(var_625.as_str());
    }
    if let Some(var_626) = &input.write_dash_manifest {
        object.key("writeDashManifest").string(var_626.as_str());
    }
    if let Some(var_627) = &input.write_hls_manifest {
        object.key("writeHlsManifest").string(var_627.as_str());
    }
    if let Some(var_628) = &input.write_segment_timeline_in_representation {
        object
            .key("writeSegmentTimelineInRepresentation")
            .string(var_628.as_str());
    }
    Ok(())
}
/// Serialize a `DashIsoGroupSettings` structure into `object`.
///
/// Generated serializer: `Option` members are written only when `Some`;
/// numeric members are written only when non-zero (the generated default).
pub fn serialize_structure_crate_model_dash_iso_group_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DashIsoGroupSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_629) = &input.additional_manifests {
        let mut array_630 = object.key("additionalManifests").start_array();
        for item_631 in var_629 {
            {
                let mut object_632 = array_630.value().start_object();
                crate::json_ser::serialize_structure_crate_model_dash_additional_manifest(
                    &mut object_632,
                    item_631,
                )?;
                object_632.finish();
            }
        }
        array_630.finish();
    }
    if let Some(var_633) = &input.audio_channel_config_scheme_id_uri {
        object
            .key("audioChannelConfigSchemeIdUri")
            .string(var_633.as_str());
    }
    if let Some(var_634) = &input.base_url {
        object.key("baseUrl").string(var_634.as_str());
    }
    if let Some(var_635) = &input.destination {
        object.key("destination").string(var_635.as_str());
    }
    if let Some(var_636) = &input.destination_settings {
        let mut object_637 = object.key("destinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_destination_settings(
            &mut object_637,
            var_636,
        )?;
        object_637.finish();
    }
    if let Some(var_638) = &input.encryption {
        let mut object_639 = object.key("encryption").start_object();
        crate::json_ser::serialize_structure_crate_model_dash_iso_encryption_settings(
            &mut object_639,
            var_638,
        )?;
        object_639.finish();
    }
    if input.fragment_length != 0 {
        object.key("fragmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.fragment_length).into()),
        );
    }
    if let Some(var_640) = &input.hbbtv_compliance {
        object.key("hbbtvCompliance").string(var_640.as_str());
    }
    if let Some(var_641) = &input.image_based_trick_play {
        object.key("imageBasedTrickPlay").string(var_641.as_str());
    }
    if let Some(var_642) = &input.image_based_trick_play_settings {
        let mut object_643 = object.key("imageBasedTrickPlaySettings").start_object();
        crate::json_ser::serialize_structure_crate_model_dash_iso_image_based_trick_play_settings(
            &mut object_643,
            var_642,
        )?;
        object_643.finish();
    }
    if input.min_buffer_time != 0 {
        object.key("minBufferTime").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_buffer_time).into()),
        );
    }
    // Float member: serialized as a JSON float, skipped when exactly 0.0.
    if input.min_final_segment_length != 0.0 {
        object.key("minFinalSegmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.min_final_segment_length).into()),
        );
    }
    if let Some(var_644) = &input.mpd_profile {
        object.key("mpdProfile").string(var_644.as_str());
    }
    if let Some(var_645) = &input.pts_offset_handling_for_b_frames {
        object
            .key("ptsOffsetHandlingForBFrames")
            .string(var_645.as_str());
    }
    if let Some(var_646) = &input.segment_control {
        object.key("segmentControl").string(var_646.as_str());
    }
    if input.segment_length != 0 {
        object.key("segmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.segment_length).into()),
        );
    }
    if let Some(var_647) = &input.segment_length_control {
        object.key("segmentLengthControl").string(var_647.as_str());
    }
    if let Some(var_648) = &input.write_segment_timeline_in_representation {
        object
            .key("writeSegmentTimelineInRepresentation")
            .string(var_648.as_str());
    }
    Ok(())
}
/// Serialize a `FileGroupSettings` structure into `object`; unset members
/// are skipped.
pub fn serialize_structure_crate_model_file_group_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::FileGroupSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(destination) = &input.destination {
        object.key("destination").string(destination.as_str());
    }
    if let Some(dest_settings) = &input.destination_settings {
        let mut dest_obj = object.key("destinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_destination_settings(
            &mut dest_obj,
            dest_settings,
        )?;
        dest_obj.finish();
    }
    Ok(())
}
/// Serialize a `HlsGroupSettings` structure into `object`.
///
/// Generated serializer: `Option` members are written only when `Some`;
/// numeric members are written only when non-zero (the generated default).
/// `adMarkers` is a list of enum strings; `additionalManifests` and
/// `captionLanguageMappings` are lists of nested structures.
pub fn serialize_structure_crate_model_hls_group_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HlsGroupSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_652) = &input.ad_markers {
        let mut array_653 = object.key("adMarkers").start_array();
        for item_654 in var_652 {
            {
                array_653.value().string(item_654.as_str());
            }
        }
        array_653.finish();
    }
    if let Some(var_655) = &input.additional_manifests {
        let mut array_656 = object.key("additionalManifests").start_array();
        for item_657 in var_655 {
            {
                let mut object_658 = array_656.value().start_object();
                crate::json_ser::serialize_structure_crate_model_hls_additional_manifest(
                    &mut object_658,
                    item_657,
                )?;
                object_658.finish();
            }
        }
        array_656.finish();
    }
    if let Some(var_659) = &input.audio_only_header {
        object.key("audioOnlyHeader").string(var_659.as_str());
    }
    if let Some(var_660) = &input.base_url {
        object.key("baseUrl").string(var_660.as_str());
    }
    if let Some(var_661) = &input.caption_language_mappings {
        let mut array_662 = object.key("captionLanguageMappings").start_array();
        for item_663 in var_661 {
            {
                let mut object_664 = array_662.value().start_object();
                crate::json_ser::serialize_structure_crate_model_hls_caption_language_mapping(
                    &mut object_664,
                    item_663,
                )?;
                object_664.finish();
            }
        }
        array_662.finish();
    }
    if let Some(var_665) = &input.caption_language_setting {
        object
            .key("captionLanguageSetting")
            .string(var_665.as_str());
    }
    if let Some(var_666) = &input.caption_segment_length_control {
        object
            .key("captionSegmentLengthControl")
            .string(var_666.as_str());
    }
    if let Some(var_667) = &input.client_cache {
        object.key("clientCache").string(var_667.as_str());
    }
    if let Some(var_668) = &input.codec_specification {
        object.key("codecSpecification").string(var_668.as_str());
    }
    if let Some(var_669) = &input.destination {
        object.key("destination").string(var_669.as_str());
    }
    if let Some(var_670) = &input.destination_settings {
        let mut object_671 = object.key("destinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_destination_settings(
            &mut object_671,
            var_670,
        )?;
        object_671.finish();
    }
    if let Some(var_672) = &input.directory_structure {
        object.key("directoryStructure").string(var_672.as_str());
    }
    if let Some(var_673) = &input.encryption {
        let mut object_674 = object.key("encryption").start_object();
        crate::json_ser::serialize_structure_crate_model_hls_encryption_settings(
            &mut object_674,
            var_673,
        )?;
        object_674.finish();
    }
    if let Some(var_675) = &input.image_based_trick_play {
        object.key("imageBasedTrickPlay").string(var_675.as_str());
    }
    if let Some(var_676) = &input.image_based_trick_play_settings {
        let mut object_677 = object.key("imageBasedTrickPlaySettings").start_object();
        crate::json_ser::serialize_structure_crate_model_hls_image_based_trick_play_settings(
            &mut object_677,
            var_676,
        )?;
        object_677.finish();
    }
    if let Some(var_678) = &input.manifest_compression {
        object.key("manifestCompression").string(var_678.as_str());
    }
    if let Some(var_679) = &input.manifest_duration_format {
        object
            .key("manifestDurationFormat")
            .string(var_679.as_str());
    }
    // Float member: serialized as a JSON float, skipped when exactly 0.0.
    if input.min_final_segment_length != 0.0 {
        object.key("minFinalSegmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.min_final_segment_length).into()),
        );
    }
    if input.min_segment_length != 0 {
        object.key("minSegmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_segment_length).into()),
        );
    }
    if let Some(var_680) = &input.output_selection {
        object.key("outputSelection").string(var_680.as_str());
    }
    if let Some(var_681) = &input.program_date_time {
        object.key("programDateTime").string(var_681.as_str());
    }
    if input.program_date_time_period != 0 {
        object.key("programDateTimePeriod").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.program_date_time_period).into()),
        );
    }
    if let Some(var_682) = &input.segment_control {
        object.key("segmentControl").string(var_682.as_str());
    }
    if input.segment_length != 0 {
        object.key("segmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.segment_length).into()),
        );
    }
    if let Some(var_683) = &input.segment_length_control {
        object.key("segmentLengthControl").string(var_683.as_str());
    }
    if input.segments_per_subdirectory != 0 {
        object.key("segmentsPerSubdirectory").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.segments_per_subdirectory).into()),
        );
    }
    if let Some(var_684) = &input.stream_inf_resolution {
        object.key("streamInfResolution").string(var_684.as_str());
    }
    if let Some(var_685) = &input.target_duration_compatibility_mode {
        object
            .key("targetDurationCompatibilityMode")
            .string(var_685.as_str());
    }
    if let Some(var_686) = &input.timed_metadata_id3_frame {
        object.key("timedMetadataId3Frame").string(var_686.as_str());
    }
    if input.timed_metadata_id3_period != 0 {
        object.key("timedMetadataId3Period").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.timed_metadata_id3_period).into()),
        );
    }
    if input.timestamp_delta_milliseconds != 0 {
        object.key("timestampDeltaMilliseconds").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.timestamp_delta_milliseconds).into()),
        );
    }
    Ok(())
}
/// Serialize a `MsSmoothGroupSettings` structure into `object`.
///
/// `Option` members are written only when `Some`; `fragmentLength` is
/// written only when non-zero (the generated default).
pub fn serialize_structure_crate_model_ms_smooth_group_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MsSmoothGroupSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(manifests) = &input.additional_manifests {
        let mut manifests_array = object.key("additionalManifests").start_array();
        for manifest in manifests {
            let mut manifest_obj = manifests_array.value().start_object();
            crate::json_ser::serialize_structure_crate_model_ms_smooth_additional_manifest(
                &mut manifest_obj,
                manifest,
            )?;
            manifest_obj.finish();
        }
        manifests_array.finish();
    }
    if let Some(dedup) = &input.audio_deduplication {
        object.key("audioDeduplication").string(dedup.as_str());
    }
    if let Some(destination) = &input.destination {
        object.key("destination").string(destination.as_str());
    }
    if let Some(dest_settings) = &input.destination_settings {
        let mut dest_obj = object.key("destinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_destination_settings(
            &mut dest_obj,
            dest_settings,
        )?;
        dest_obj.finish();
    }
    if let Some(encryption) = &input.encryption {
        let mut enc_obj = object.key("encryption").start_object();
        crate::json_ser::serialize_structure_crate_model_ms_smooth_encryption_settings(
            &mut enc_obj,
            encryption,
        )?;
        enc_obj.finish();
    }
    if input.fragment_length != 0 {
        object.key("fragmentLength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.fragment_length).into()),
        );
    }
    if let Some(length_control) = &input.fragment_length_control {
        object
            .key("fragmentLengthControl")
            .string(length_control.as_str());
    }
    if let Some(encoding) = &input.manifest_encoding {
        object.key("manifestEncoding").string(encoding.as_str());
    }
    Ok(())
}
/// Serialize a `CaptionDescription` structure into `object`; unset members
/// are skipped.
pub fn serialize_structure_crate_model_caption_description(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CaptionDescription,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(selector_name) = &input.caption_selector_name {
        object
            .key("captionSelectorName")
            .string(selector_name.as_str());
    }
    if let Some(custom_code) = &input.custom_language_code {
        object.key("customLanguageCode").string(custom_code.as_str());
    }
    if let Some(dest_settings) = &input.destination_settings {
        let mut dest_obj = object.key("destinationSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_caption_destination_settings(
            &mut dest_obj,
            dest_settings,
        )?;
        dest_obj.finish();
    }
    if let Some(language_code) = &input.language_code {
        object.key("languageCode").string(language_code.as_str());
    }
    if let Some(description) = &input.language_description {
        object.key("languageDescription").string(description.as_str());
    }
    Ok(())
}
/// Serialize an `OutputSettings` structure into `object`; only the optional
/// nested `hlsSettings` member is emitted.
pub fn serialize_structure_crate_model_output_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::OutputSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(hls) = &input.hls_settings {
        let mut hls_obj = object.key("hlsSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_hls_settings(&mut hls_obj, hls)?;
        hls_obj.finish();
    }
    Ok(())
}
/// Serializes `AacSettings` into the given JSON object writer.
///
/// Optional (enum-valued) fields are emitted only when `Some`; integer
/// fields only when non-zero. Emission order below fixes the JSON key order.
/// NOTE(review): `Number::NegInt` is used for every signed integer field
/// throughout this generated file, regardless of the value's actual sign.
pub fn serialize_structure_crate_model_aac_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AacSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_707) = &input.audio_description_broadcaster_mix {
        object
            .key("audioDescriptionBroadcasterMix")
            .string(var_707.as_str());
    }
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_708) = &input.codec_profile {
        object.key("codecProfile").string(var_708.as_str());
    }
    if let Some(var_709) = &input.coding_mode {
        object.key("codingMode").string(var_709.as_str());
    }
    if let Some(var_710) = &input.rate_control_mode {
        object.key("rateControlMode").string(var_710.as_str());
    }
    if let Some(var_711) = &input.raw_format {
        object.key("rawFormat").string(var_711.as_str());
    }
    if input.sample_rate != 0 {
        object.key("sampleRate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.sample_rate).into()),
        );
    }
    if let Some(var_712) = &input.specification {
        object.key("specification").string(var_712.as_str());
    }
    if let Some(var_713) = &input.vbr_quality {
        object.key("vbrQuality").string(var_713.as_str());
    }
    Ok(())
}
/// Serializes `Ac3Settings` into the given JSON object writer.
///
/// Optional fields are emitted only when `Some`; integer fields only when
/// non-zero. The three `dynamicRangeCompression*` keys map to three distinct
/// struct fields (`_line`, `_profile`, `_rf`) — keep the pairings intact.
pub fn serialize_structure_crate_model_ac3_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Ac3Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_714) = &input.bitstream_mode {
        object.key("bitstreamMode").string(var_714.as_str());
    }
    if let Some(var_715) = &input.coding_mode {
        object.key("codingMode").string(var_715.as_str());
    }
    if input.dialnorm != 0 {
        object.key("dialnorm").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.dialnorm).into()),
        );
    }
    if let Some(var_716) = &input.dynamic_range_compression_line {
        object
            .key("dynamicRangeCompressionLine")
            .string(var_716.as_str());
    }
    if let Some(var_717) = &input.dynamic_range_compression_profile {
        object
            .key("dynamicRangeCompressionProfile")
            .string(var_717.as_str());
    }
    if let Some(var_718) = &input.dynamic_range_compression_rf {
        object
            .key("dynamicRangeCompressionRf")
            .string(var_718.as_str());
    }
    if let Some(var_719) = &input.lfe_filter {
        object.key("lfeFilter").string(var_719.as_str());
    }
    if let Some(var_720) = &input.metadata_control {
        object.key("metadataControl").string(var_720.as_str());
    }
    if input.sample_rate != 0 {
        object.key("sampleRate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.sample_rate).into()),
        );
    }
    Ok(())
}
/// Serializes `AiffSettings` as JSON; each integer field is written only
/// when its value is non-zero.
pub fn serialize_structure_crate_model_aiff_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AiffSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bit_depth != 0 {
        #[allow(clippy::useless_conversion)]
        let bit_depth = aws_smithy_types::Number::NegInt((input.bit_depth).into());
        object.key("bitDepth").number(bit_depth);
    }
    if input.channels != 0 {
        #[allow(clippy::useless_conversion)]
        let channels = aws_smithy_types::Number::NegInt((input.channels).into());
        object.key("channels").number(channels);
    }
    if input.sample_rate != 0 {
        #[allow(clippy::useless_conversion)]
        let sample_rate = aws_smithy_types::Number::NegInt((input.sample_rate).into());
        object.key("sampleRate").number(sample_rate);
    }
    Ok(())
}
/// Serializes `Eac3AtmosSettings` into the given JSON object writer.
///
/// Optional fields are emitted only when `Some`; integer fields only when
/// non-zero; the four mix-level floats only when non-zero (compared to 0.0).
/// The loRo/ltRt center/surround mix-level keys map to four distinct fields —
/// keep the pairings intact when editing.
pub fn serialize_structure_crate_model_eac3_atmos_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Eac3AtmosSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_721) = &input.bitstream_mode {
        object.key("bitstreamMode").string(var_721.as_str());
    }
    if let Some(var_722) = &input.coding_mode {
        object.key("codingMode").string(var_722.as_str());
    }
    if let Some(var_723) = &input.dialogue_intelligence {
        object.key("dialogueIntelligence").string(var_723.as_str());
    }
    if let Some(var_724) = &input.downmix_control {
        object.key("downmixControl").string(var_724.as_str());
    }
    if let Some(var_725) = &input.dynamic_range_compression_line {
        object
            .key("dynamicRangeCompressionLine")
            .string(var_725.as_str());
    }
    if let Some(var_726) = &input.dynamic_range_compression_rf {
        object
            .key("dynamicRangeCompressionRf")
            .string(var_726.as_str());
    }
    if let Some(var_727) = &input.dynamic_range_control {
        object.key("dynamicRangeControl").string(var_727.as_str());
    }
    if input.lo_ro_center_mix_level != 0.0 {
        object.key("loRoCenterMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lo_ro_center_mix_level).into()),
        );
    }
    if input.lo_ro_surround_mix_level != 0.0 {
        object.key("loRoSurroundMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lo_ro_surround_mix_level).into()),
        );
    }
    if input.lt_rt_center_mix_level != 0.0 {
        object.key("ltRtCenterMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lt_rt_center_mix_level).into()),
        );
    }
    if input.lt_rt_surround_mix_level != 0.0 {
        object.key("ltRtSurroundMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lt_rt_surround_mix_level).into()),
        );
    }
    if let Some(var_728) = &input.metering_mode {
        object.key("meteringMode").string(var_728.as_str());
    }
    if input.sample_rate != 0 {
        object.key("sampleRate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.sample_rate).into()),
        );
    }
    if input.speech_threshold != 0 {
        object.key("speechThreshold").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.speech_threshold).into()),
        );
    }
    if let Some(var_729) = &input.stereo_downmix {
        object.key("stereoDownmix").string(var_729.as_str());
    }
    if let Some(var_730) = &input.surround_ex_mode {
        object.key("surroundExMode").string(var_730.as_str());
    }
    Ok(())
}
/// Serializes `Eac3Settings` into the given JSON object writer.
///
/// Optional fields are emitted only when `Some`; integer fields only when
/// non-zero; float mix-level fields only when non-zero (compared to 0.0).
/// Many keys differ only by one word (lfeControl vs lfeFilter, the four
/// mix levels, the DRC pair) — keep each key/field pairing intact.
pub fn serialize_structure_crate_model_eac3_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Eac3Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_731) = &input.attenuation_control {
        object.key("attenuationControl").string(var_731.as_str());
    }
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_732) = &input.bitstream_mode {
        object.key("bitstreamMode").string(var_732.as_str());
    }
    if let Some(var_733) = &input.coding_mode {
        object.key("codingMode").string(var_733.as_str());
    }
    if let Some(var_734) = &input.dc_filter {
        object.key("dcFilter").string(var_734.as_str());
    }
    if input.dialnorm != 0 {
        object.key("dialnorm").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.dialnorm).into()),
        );
    }
    if let Some(var_735) = &input.dynamic_range_compression_line {
        object
            .key("dynamicRangeCompressionLine")
            .string(var_735.as_str());
    }
    if let Some(var_736) = &input.dynamic_range_compression_rf {
        object
            .key("dynamicRangeCompressionRf")
            .string(var_736.as_str());
    }
    if let Some(var_737) = &input.lfe_control {
        object.key("lfeControl").string(var_737.as_str());
    }
    if let Some(var_738) = &input.lfe_filter {
        object.key("lfeFilter").string(var_738.as_str());
    }
    if input.lo_ro_center_mix_level != 0.0 {
        object.key("loRoCenterMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lo_ro_center_mix_level).into()),
        );
    }
    if input.lo_ro_surround_mix_level != 0.0 {
        object.key("loRoSurroundMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lo_ro_surround_mix_level).into()),
        );
    }
    if input.lt_rt_center_mix_level != 0.0 {
        object.key("ltRtCenterMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lt_rt_center_mix_level).into()),
        );
    }
    if input.lt_rt_surround_mix_level != 0.0 {
        object.key("ltRtSurroundMixLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.lt_rt_surround_mix_level).into()),
        );
    }
    if let Some(var_739) = &input.metadata_control {
        object.key("metadataControl").string(var_739.as_str());
    }
    if let Some(var_740) = &input.passthrough_control {
        object.key("passthroughControl").string(var_740.as_str());
    }
    if let Some(var_741) = &input.phase_control {
        object.key("phaseControl").string(var_741.as_str());
    }
    if input.sample_rate != 0 {
        object.key("sampleRate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.sample_rate).into()),
        );
    }
    if let Some(var_742) = &input.stereo_downmix {
        object.key("stereoDownmix").string(var_742.as_str());
    }
    if let Some(var_743) = &input.surround_ex_mode {
        object.key("surroundExMode").string(var_743.as_str());
    }
    if let Some(var_744) = &input.surround_mode {
        object.key("surroundMode").string(var_744.as_str());
    }
    Ok(())
}
/// Serializes `Mp2Settings` as JSON; each integer field is written only
/// when its value is non-zero.
pub fn serialize_structure_crate_model_mp2_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Mp2Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bitrate != 0 {
        #[allow(clippy::useless_conversion)]
        let bitrate = aws_smithy_types::Number::NegInt((input.bitrate).into());
        object.key("bitrate").number(bitrate);
    }
    if input.channels != 0 {
        #[allow(clippy::useless_conversion)]
        let channels = aws_smithy_types::Number::NegInt((input.channels).into());
        object.key("channels").number(channels);
    }
    if input.sample_rate != 0 {
        #[allow(clippy::useless_conversion)]
        let sample_rate = aws_smithy_types::Number::NegInt((input.sample_rate).into());
        object.key("sampleRate").number(sample_rate);
    }
    Ok(())
}
/// Serializes `Mp3Settings` as JSON. Integer fields are written only when
/// non-zero; the optional `rateControlMode` only when present.
pub fn serialize_structure_crate_model_mp3_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Mp3Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bitrate != 0 {
        #[allow(clippy::useless_conversion)]
        let bitrate = aws_smithy_types::Number::NegInt((input.bitrate).into());
        object.key("bitrate").number(bitrate);
    }
    if input.channels != 0 {
        #[allow(clippy::useless_conversion)]
        let channels = aws_smithy_types::Number::NegInt((input.channels).into());
        object.key("channels").number(channels);
    }
    if let Some(mode) = input.rate_control_mode.as_ref() {
        object.key("rateControlMode").string(mode.as_str());
    }
    if input.sample_rate != 0 {
        #[allow(clippy::useless_conversion)]
        let sample_rate = aws_smithy_types::Number::NegInt((input.sample_rate).into());
        object.key("sampleRate").number(sample_rate);
    }
    if input.vbr_quality != 0 {
        #[allow(clippy::useless_conversion)]
        let vbr_quality = aws_smithy_types::Number::NegInt((input.vbr_quality).into());
        object.key("vbrQuality").number(vbr_quality);
    }
    Ok(())
}
/// Serializes `OpusSettings` as JSON; each integer field is written only
/// when its value is non-zero.
pub fn serialize_structure_crate_model_opus_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::OpusSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bitrate != 0 {
        #[allow(clippy::useless_conversion)]
        let bitrate = aws_smithy_types::Number::NegInt((input.bitrate).into());
        object.key("bitrate").number(bitrate);
    }
    if input.channels != 0 {
        #[allow(clippy::useless_conversion)]
        let channels = aws_smithy_types::Number::NegInt((input.channels).into());
        object.key("channels").number(channels);
    }
    if input.sample_rate != 0 {
        #[allow(clippy::useless_conversion)]
        let sample_rate = aws_smithy_types::Number::NegInt((input.sample_rate).into());
        object.key("sampleRate").number(sample_rate);
    }
    Ok(())
}
/// Serializes `VorbisSettings` as JSON; each integer field is written only
/// when its value is non-zero.
pub fn serialize_structure_crate_model_vorbis_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::VorbisSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.channels != 0 {
        #[allow(clippy::useless_conversion)]
        let channels = aws_smithy_types::Number::NegInt((input.channels).into());
        object.key("channels").number(channels);
    }
    if input.sample_rate != 0 {
        #[allow(clippy::useless_conversion)]
        let sample_rate = aws_smithy_types::Number::NegInt((input.sample_rate).into());
        object.key("sampleRate").number(sample_rate);
    }
    if input.vbr_quality != 0 {
        #[allow(clippy::useless_conversion)]
        let vbr_quality = aws_smithy_types::Number::NegInt((input.vbr_quality).into());
        object.key("vbrQuality").number(vbr_quality);
    }
    Ok(())
}
/// Serializes `WavSettings` as JSON. Integer fields are written only when
/// non-zero; the optional `format` string only when present.
pub fn serialize_structure_crate_model_wav_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::WavSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bit_depth != 0 {
        #[allow(clippy::useless_conversion)]
        let bit_depth = aws_smithy_types::Number::NegInt((input.bit_depth).into());
        object.key("bitDepth").number(bit_depth);
    }
    if input.channels != 0 {
        #[allow(clippy::useless_conversion)]
        let channels = aws_smithy_types::Number::NegInt((input.channels).into());
        object.key("channels").number(channels);
    }
    if let Some(format) = input.format.as_ref() {
        object.key("format").string(format.as_str());
    }
    if input.sample_rate != 0 {
        #[allow(clippy::useless_conversion)]
        let sample_rate = aws_smithy_types::Number::NegInt((input.sample_rate).into());
        object.key("sampleRate").number(sample_rate);
    }
    Ok(())
}
/// Serializes a `ChannelMapping`: when present, `outputChannels` is written
/// as a JSON array with one nested object per output-channel mapping.
pub fn serialize_structure_crate_model_channel_mapping(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ChannelMapping,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(channels) = input.output_channels.as_ref() {
        let mut channels_array = object.key("outputChannels").start_array();
        for channel in channels {
            let mut entry = channels_array.value().start_object();
            crate::json_ser::serialize_structure_crate_model_output_channel_mapping(
                &mut entry, channel,
            )?;
            entry.finish();
        }
        channels_array.finish();
    }
    Ok(())
}
/// Serializes `BurninDestinationSettings` into the given JSON object writer.
///
/// Optional fields are emitted only when `Some`; integer fields only when
/// non-zero. Many keys are near-duplicates (fontColor/hexFontColor,
/// shadowXOffset/shadowYOffset, xPosition/yPosition) — keep each key/field
/// pairing intact when editing.
pub fn serialize_structure_crate_model_burnin_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::BurninDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_751) = &input.alignment {
        object.key("alignment").string(var_751.as_str());
    }
    if let Some(var_752) = &input.apply_font_color {
        object.key("applyFontColor").string(var_752.as_str());
    }
    if let Some(var_753) = &input.background_color {
        object.key("backgroundColor").string(var_753.as_str());
    }
    if input.background_opacity != 0 {
        object.key("backgroundOpacity").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.background_opacity).into()),
        );
    }
    if let Some(var_754) = &input.fallback_font {
        object.key("fallbackFont").string(var_754.as_str());
    }
    if let Some(var_755) = &input.font_color {
        object.key("fontColor").string(var_755.as_str());
    }
    if input.font_opacity != 0 {
        object.key("fontOpacity").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.font_opacity).into()),
        );
    }
    if input.font_resolution != 0 {
        object.key("fontResolution").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.font_resolution).into()),
        );
    }
    if let Some(var_756) = &input.font_script {
        object.key("fontScript").string(var_756.as_str());
    }
    if input.font_size != 0 {
        object.key("fontSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.font_size).into()),
        );
    }
    if let Some(var_757) = &input.hex_font_color {
        object.key("hexFontColor").string(var_757.as_str());
    }
    if let Some(var_758) = &input.outline_color {
        object.key("outlineColor").string(var_758.as_str());
    }
    if input.outline_size != 0 {
        object.key("outlineSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.outline_size).into()),
        );
    }
    if let Some(var_759) = &input.shadow_color {
        object.key("shadowColor").string(var_759.as_str());
    }
    if input.shadow_opacity != 0 {
        object.key("shadowOpacity").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.shadow_opacity).into()),
        );
    }
    if input.shadow_x_offset != 0 {
        object.key("shadowXOffset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.shadow_x_offset).into()),
        );
    }
    if input.shadow_y_offset != 0 {
        object.key("shadowYOffset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.shadow_y_offset).into()),
        );
    }
    if let Some(var_760) = &input.style_passthrough {
        object.key("stylePassthrough").string(var_760.as_str());
    }
    if let Some(var_761) = &input.teletext_spacing {
        object.key("teletextSpacing").string(var_761.as_str());
    }
    if input.x_position != 0 {
        object.key("xPosition").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.x_position).into()),
        );
    }
    if input.y_position != 0 {
        object.key("yPosition").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.y_position).into()),
        );
    }
    Ok(())
}
/// Serializes `DvbSubDestinationSettings` into the given JSON object writer.
///
/// Optional fields are emitted only when `Some`; integer fields only when
/// non-zero. This is the largest caption-destination serializer here (27
/// fields), with several near-duplicate keys (ddsXCoordinate/ddsYCoordinate,
/// shadowXOffset/shadowYOffset, xPosition/yPosition, width/height) — keep
/// each key/field pairing intact when editing.
pub fn serialize_structure_crate_model_dvb_sub_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DvbSubDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_762) = &input.alignment {
        object.key("alignment").string(var_762.as_str());
    }
    if let Some(var_763) = &input.apply_font_color {
        object.key("applyFontColor").string(var_763.as_str());
    }
    if let Some(var_764) = &input.background_color {
        object.key("backgroundColor").string(var_764.as_str());
    }
    if input.background_opacity != 0 {
        object.key("backgroundOpacity").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.background_opacity).into()),
        );
    }
    if let Some(var_765) = &input.dds_handling {
        object.key("ddsHandling").string(var_765.as_str());
    }
    if input.dds_x_coordinate != 0 {
        object.key("ddsXCoordinate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.dds_x_coordinate).into()),
        );
    }
    if input.dds_y_coordinate != 0 {
        object.key("ddsYCoordinate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.dds_y_coordinate).into()),
        );
    }
    if let Some(var_766) = &input.fallback_font {
        object.key("fallbackFont").string(var_766.as_str());
    }
    if let Some(var_767) = &input.font_color {
        object.key("fontColor").string(var_767.as_str());
    }
    if input.font_opacity != 0 {
        object.key("fontOpacity").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.font_opacity).into()),
        );
    }
    if input.font_resolution != 0 {
        object.key("fontResolution").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.font_resolution).into()),
        );
    }
    if let Some(var_768) = &input.font_script {
        object.key("fontScript").string(var_768.as_str());
    }
    if input.font_size != 0 {
        object.key("fontSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.font_size).into()),
        );
    }
    if input.height != 0 {
        object.key("height").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.height).into()),
        );
    }
    if let Some(var_769) = &input.hex_font_color {
        object.key("hexFontColor").string(var_769.as_str());
    }
    if let Some(var_770) = &input.outline_color {
        object.key("outlineColor").string(var_770.as_str());
    }
    if input.outline_size != 0 {
        object.key("outlineSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.outline_size).into()),
        );
    }
    if let Some(var_771) = &input.shadow_color {
        object.key("shadowColor").string(var_771.as_str());
    }
    if input.shadow_opacity != 0 {
        object.key("shadowOpacity").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.shadow_opacity).into()),
        );
    }
    if input.shadow_x_offset != 0 {
        object.key("shadowXOffset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.shadow_x_offset).into()),
        );
    }
    if input.shadow_y_offset != 0 {
        object.key("shadowYOffset").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.shadow_y_offset).into()),
        );
    }
    if let Some(var_772) = &input.style_passthrough {
        object.key("stylePassthrough").string(var_772.as_str());
    }
    if let Some(var_773) = &input.subtitling_type {
        object.key("subtitlingType").string(var_773.as_str());
    }
    if let Some(var_774) = &input.teletext_spacing {
        object.key("teletextSpacing").string(var_774.as_str());
    }
    if input.width != 0 {
        object.key("width").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.width).into()),
        );
    }
    if input.x_position != 0 {
        object.key("xPosition").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.x_position).into()),
        );
    }
    if input.y_position != 0 {
        object.key("yPosition").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.y_position).into()),
        );
    }
    Ok(())
}
/// Serializes `EmbeddedDestinationSettings`: the CEA-608 channel number and
/// CEA-708 service number fields are emitted only when non-zero.
pub fn serialize_structure_crate_model_embedded_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::EmbeddedDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.destination608_channel_number != 0 {
        #[allow(clippy::useless_conversion)]
        let channel =
            aws_smithy_types::Number::NegInt((input.destination608_channel_number).into());
        object.key("destination608ChannelNumber").number(channel);
    }
    if input.destination708_service_number != 0 {
        #[allow(clippy::useless_conversion)]
        let service =
            aws_smithy_types::Number::NegInt((input.destination708_service_number).into());
        object.key("destination708ServiceNumber").number(service);
    }
    Ok(())
}
/// Serializes `ImscDestinationSettings`; both fields are optional strings
/// and are written only when present.
pub fn serialize_structure_crate_model_imsc_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ImscDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(accessibility) = input.accessibility.as_ref() {
        object.key("accessibility").string(accessibility.as_str());
    }
    if let Some(passthrough) = input.style_passthrough.as_ref() {
        object.key("stylePassthrough").string(passthrough.as_str());
    }
    Ok(())
}
/// Serializes `SccDestinationSettings`; the single optional `framerate`
/// field is written only when present.
pub fn serialize_structure_crate_model_scc_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::SccDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(framerate) = input.framerate.as_ref() {
        object.key("framerate").string(framerate.as_str());
    }
    Ok(())
}
/// Serializes `SrtDestinationSettings`; the single optional
/// `stylePassthrough` field is written only when present.
pub fn serialize_structure_crate_model_srt_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::SrtDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(passthrough) = input.style_passthrough.as_ref() {
        object.key("stylePassthrough").string(passthrough.as_str());
    }
    Ok(())
}
/// Serializes `TeletextDestinationSettings`: the optional page number as a
/// string, and the optional page types as a JSON array of strings.
pub fn serialize_structure_crate_model_teletext_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TeletextDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(page_number) = input.page_number.as_ref() {
        object.key("pageNumber").string(page_number.as_str());
    }
    if let Some(page_types) = input.page_types.as_ref() {
        let mut types_array = object.key("pageTypes").start_array();
        for page_type in page_types {
            types_array.value().string(page_type.as_str());
        }
        types_array.finish();
    }
    Ok(())
}
/// Serializes `TtmlDestinationSettings`; the single optional
/// `stylePassthrough` field is written only when present.
pub fn serialize_structure_crate_model_ttml_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TtmlDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(passthrough) = input.style_passthrough.as_ref() {
        object.key("stylePassthrough").string(passthrough.as_str());
    }
    Ok(())
}
/// Serializes `WebvttDestinationSettings`; both fields are optional strings
/// and are written only when present.
pub fn serialize_structure_crate_model_webvtt_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::WebvttDestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(accessibility) = input.accessibility.as_ref() {
        object.key("accessibility").string(accessibility.as_str());
    }
    if let Some(passthrough) = input.style_passthrough.as_ref() {
        object.key("stylePassthrough").string(passthrough.as_str());
    }
    Ok(())
}
/// Serializes `DvbNitSettings`: integer fields (`networkId`, `nitInterval`)
/// are written only when non-zero, the optional `networkName` when present.
pub fn serialize_structure_crate_model_dvb_nit_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DvbNitSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.network_id != 0 {
        #[allow(clippy::useless_conversion)]
        let network_id = aws_smithy_types::Number::NegInt((input.network_id).into());
        object.key("networkId").number(network_id);
    }
    if let Some(name) = input.network_name.as_ref() {
        object.key("networkName").string(name.as_str());
    }
    if input.nit_interval != 0 {
        #[allow(clippy::useless_conversion)]
        let interval = aws_smithy_types::Number::NegInt((input.nit_interval).into());
        object.key("nitInterval").number(interval);
    }
    Ok(())
}
/// Serializes `DvbSdtSettings`: optional strings when present, the
/// `sdtInterval` integer only when non-zero.
pub fn serialize_structure_crate_model_dvb_sdt_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DvbSdtSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(output_sdt) = input.output_sdt.as_ref() {
        object.key("outputSdt").string(output_sdt.as_str());
    }
    if input.sdt_interval != 0 {
        #[allow(clippy::useless_conversion)]
        let interval = aws_smithy_types::Number::NegInt((input.sdt_interval).into());
        object.key("sdtInterval").number(interval);
    }
    if let Some(service_name) = input.service_name.as_ref() {
        object.key("serviceName").string(service_name.as_str());
    }
    if let Some(provider) = input.service_provider_name.as_ref() {
        object.key("serviceProviderName").string(provider.as_str());
    }
    Ok(())
}
/// Serializes `DvbTdtSettings`; the single integer `tdtInterval` field is
/// written only when non-zero.
pub fn serialize_structure_crate_model_dvb_tdt_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DvbTdtSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.tdt_interval != 0 {
        #[allow(clippy::useless_conversion)]
        let interval = aws_smithy_types::Number::NegInt((input.tdt_interval).into());
        object.key("tdtInterval").number(interval);
    }
    Ok(())
}
/// Serializes `M2tsScte35Esam`; the single integer `scte35EsamPid` field is
/// written only when non-zero.
pub fn serialize_structure_crate_model_m2ts_scte35_esam(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::M2tsScte35Esam,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.scte35_esam_pid != 0 {
        #[allow(clippy::useless_conversion)]
        let pid = aws_smithy_types::Number::NegInt((input.scte35_esam_pid).into());
        object.key("scte35EsamPid").number(pid);
    }
    Ok(())
}
/// Serializes `MxfXavcProfileSettings`: the optional `durationMode` string
/// when present, and `maxAncDataSize` only when non-zero.
pub fn serialize_structure_crate_model_mxf_xavc_profile_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MxfXavcProfileSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(duration_mode) = input.duration_mode.as_ref() {
        object.key("durationMode").string(duration_mode.as_str());
    }
    if input.max_anc_data_size != 0 {
        #[allow(clippy::useless_conversion)]
        let max_size = aws_smithy_types::Number::NegInt((input.max_anc_data_size).into());
        object.key("maxAncDataSize").number(max_size);
    }
    Ok(())
}
/// Serializes `Av1Settings` into the given JSON object writer.
///
/// Optional fields are emitted only when `Some`; integer fields only when
/// non-zero; the float `gopSize` only when non-zero (compared to 0.0).
/// Note that `bitDepth` here is an enum-valued string, not a number, and
/// `qvbrSettings` is a nested object delegated to its own serializer.
pub fn serialize_structure_crate_model_av1_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Av1Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_791) = &input.adaptive_quantization {
        object.key("adaptiveQuantization").string(var_791.as_str());
    }
    if let Some(var_792) = &input.bit_depth {
        object.key("bitDepth").string(var_792.as_str());
    }
    if let Some(var_793) = &input.framerate_control {
        object.key("framerateControl").string(var_793.as_str());
    }
    if let Some(var_794) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(var_794.as_str());
    }
    if input.framerate_denominator != 0 {
        object.key("framerateDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_denominator).into()),
        );
    }
    if input.framerate_numerator != 0 {
        object.key("framerateNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_numerator).into()),
        );
    }
    if input.gop_size != 0.0 {
        object.key("gopSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.gop_size).into()),
        );
    }
    if input.max_bitrate != 0 {
        object.key("maxBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_bitrate).into()),
        );
    }
    if input.number_b_frames_between_reference_frames != 0 {
        object.key("numberBFramesBetweenReferenceFrames").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(
                (input.number_b_frames_between_reference_frames).into(),
            ),
        );
    }
    if let Some(var_795) = &input.qvbr_settings {
        let mut object_796 = object.key("qvbrSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_av1_qvbr_settings(
            &mut object_796,
            var_795,
        )?;
        object_796.finish();
    }
    if let Some(var_797) = &input.rate_control_mode {
        object.key("rateControlMode").string(var_797.as_str());
    }
    if input.slices != 0 {
        object.key("slices").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.slices).into()),
        );
    }
    if let Some(var_798) = &input.spatial_adaptive_quantization {
        object
            .key("spatialAdaptiveQuantization")
            .string(var_798.as_str());
    }
    Ok(())
}
/// Serializes `AvcIntraSettings` into the given JSON object writer.
///
/// Optional fields are emitted only when `Some`; the two framerate integer
/// fields only when non-zero. `avcIntraUhdSettings` is a nested object
/// delegated to its own serializer.
pub fn serialize_structure_crate_model_avc_intra_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AvcIntraSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_799) = &input.avc_intra_class {
        object.key("avcIntraClass").string(var_799.as_str());
    }
    if let Some(var_800) = &input.avc_intra_uhd_settings {
        let mut object_801 = object.key("avcIntraUhdSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_avc_intra_uhd_settings(
            &mut object_801,
            var_800,
        )?;
        object_801.finish();
    }
    if let Some(var_802) = &input.framerate_control {
        object.key("framerateControl").string(var_802.as_str());
    }
    if let Some(var_803) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(var_803.as_str());
    }
    if input.framerate_denominator != 0 {
        object.key("framerateDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_denominator).into()),
        );
    }
    if input.framerate_numerator != 0 {
        object.key("framerateNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_numerator).into()),
        );
    }
    if let Some(var_804) = &input.interlace_mode {
        object.key("interlaceMode").string(var_804.as_str());
    }
    if let Some(var_805) = &input.scan_type_conversion_mode {
        object
            .key("scanTypeConversionMode")
            .string(var_805.as_str());
    }
    if let Some(var_806) = &input.slow_pal {
        object.key("slowPal").string(var_806.as_str());
    }
    if let Some(var_807) = &input.telecine {
        object.key("telecine").string(var_807.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `FrameCaptureSettings` into `object`.
///
/// Zero-valued numeric fields are treated as unset and skipped.
pub fn serialize_structure_crate_model_frame_capture_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::FrameCaptureSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.framerate_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let denominator = aws_smithy_types::Number::NegInt(input.framerate_denominator.into());
        object.key("framerateDenominator").number(denominator);
    }
    if input.framerate_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let numerator = aws_smithy_types::Number::NegInt(input.framerate_numerator.into());
        object.key("framerateNumerator").number(numerator);
    }
    if input.max_captures != 0 {
        #[allow(clippy::useless_conversion)]
        let max_captures = aws_smithy_types::Number::NegInt(input.max_captures.into());
        object.key("maxCaptures").number(max_captures);
    }
    if input.quality != 0 {
        #[allow(clippy::useless_conversion)]
        let quality = aws_smithy_types::Number::NegInt(input.quality.into());
        object.key("quality").number(quality);
    }
    Ok(())
}
/// Serializes `H264Settings` into the given JSON object writer.
///
/// Generated serializer: keys are emitted in the fixed order below, `None`
/// optionals and zero-valued numerics are omitted, and nested structures are
/// written as child JSON objects.
///
/// NOTE(review): `Number::NegInt` is used for every integer field here, even
/// though the zero-check implies positive values are expected — presumably it
/// is the generator's variant for all i64s; confirm against aws_smithy_types.
pub fn serialize_structure_crate_model_h264_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::H264Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_808) = &input.adaptive_quantization {
        object.key("adaptiveQuantization").string(var_808.as_str());
    }
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_809) = &input.codec_level {
        object.key("codecLevel").string(var_809.as_str());
    }
    if let Some(var_810) = &input.codec_profile {
        object.key("codecProfile").string(var_810.as_str());
    }
    if let Some(var_811) = &input.dynamic_sub_gop {
        object.key("dynamicSubGop").string(var_811.as_str());
    }
    if let Some(var_812) = &input.entropy_encoding {
        object.key("entropyEncoding").string(var_812.as_str());
    }
    if let Some(var_813) = &input.field_encoding {
        object.key("fieldEncoding").string(var_813.as_str());
    }
    if let Some(var_814) = &input.flicker_adaptive_quantization {
        object
            .key("flickerAdaptiveQuantization")
            .string(var_814.as_str());
    }
    if let Some(var_815) = &input.framerate_control {
        object.key("framerateControl").string(var_815.as_str());
    }
    if let Some(var_816) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(var_816.as_str());
    }
    if input.framerate_denominator != 0 {
        object.key("framerateDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_denominator).into()),
        );
    }
    if input.framerate_numerator != 0 {
        object.key("framerateNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_numerator).into()),
        );
    }
    if let Some(var_817) = &input.gop_b_reference {
        object.key("gopBReference").string(var_817.as_str());
    }
    if input.gop_closed_cadence != 0 {
        object.key("gopClosedCadence").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.gop_closed_cadence).into()),
        );
    }
    // gop_size is the only float field; it uses the Float variant.
    if input.gop_size != 0.0 {
        object.key("gopSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.gop_size).into()),
        );
    }
    if let Some(var_818) = &input.gop_size_units {
        object.key("gopSizeUnits").string(var_818.as_str());
    }
    if input.hrd_buffer_initial_fill_percentage != 0 {
        object.key("hrdBufferInitialFillPercentage").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.hrd_buffer_initial_fill_percentage).into()),
        );
    }
    if input.hrd_buffer_size != 0 {
        object.key("hrdBufferSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.hrd_buffer_size).into()),
        );
    }
    if let Some(var_819) = &input.interlace_mode {
        object.key("interlaceMode").string(var_819.as_str());
    }
    if input.max_bitrate != 0 {
        object.key("maxBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_bitrate).into()),
        );
    }
    if input.min_i_interval != 0 {
        object.key("minIInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_i_interval).into()),
        );
    }
    if input.number_b_frames_between_reference_frames != 0 {
        object.key("numberBFramesBetweenReferenceFrames").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(
                (input.number_b_frames_between_reference_frames).into(),
            ),
        );
    }
    if input.number_reference_frames != 0 {
        object.key("numberReferenceFrames").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.number_reference_frames).into()),
        );
    }
    if let Some(var_820) = &input.par_control {
        object.key("parControl").string(var_820.as_str());
    }
    if input.par_denominator != 0 {
        object.key("parDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.par_denominator).into()),
        );
    }
    if input.par_numerator != 0 {
        object.key("parNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.par_numerator).into()),
        );
    }
    if let Some(var_821) = &input.quality_tuning_level {
        object.key("qualityTuningLevel").string(var_821.as_str());
    }
    // Nested QVBR settings are written as a child object.
    if let Some(var_822) = &input.qvbr_settings {
        let mut object_823 = object.key("qvbrSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_h264_qvbr_settings(
            &mut object_823,
            var_822,
        )?;
        object_823.finish();
    }
    if let Some(var_824) = &input.rate_control_mode {
        object.key("rateControlMode").string(var_824.as_str());
    }
    if let Some(var_825) = &input.repeat_pps {
        object.key("repeatPps").string(var_825.as_str());
    }
    if let Some(var_826) = &input.scan_type_conversion_mode {
        object
            .key("scanTypeConversionMode")
            .string(var_826.as_str());
    }
    if let Some(var_827) = &input.scene_change_detect {
        object.key("sceneChangeDetect").string(var_827.as_str());
    }
    if input.slices != 0 {
        object.key("slices").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.slices).into()),
        );
    }
    if let Some(var_828) = &input.slow_pal {
        object.key("slowPal").string(var_828.as_str());
    }
    if input.softness != 0 {
        object.key("softness").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.softness).into()),
        );
    }
    if let Some(var_829) = &input.spatial_adaptive_quantization {
        object
            .key("spatialAdaptiveQuantization")
            .string(var_829.as_str());
    }
    if let Some(var_830) = &input.syntax {
        object.key("syntax").string(var_830.as_str());
    }
    if let Some(var_831) = &input.telecine {
        object.key("telecine").string(var_831.as_str());
    }
    if let Some(var_832) = &input.temporal_adaptive_quantization {
        object
            .key("temporalAdaptiveQuantization")
            .string(var_832.as_str());
    }
    if let Some(var_833) = &input.unregistered_sei_timecode {
        object
            .key("unregisteredSeiTimecode")
            .string(var_833.as_str());
    }
    Ok(())
}
/// Serializes `H265Settings` into the given JSON object writer.
///
/// Generated serializer: keys are emitted in the fixed order below, `None`
/// optionals and zero-valued numerics are omitted, and nested structures are
/// written as child JSON objects.
pub fn serialize_structure_crate_model_h265_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::H265Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_834) = &input.adaptive_quantization {
        object.key("adaptiveQuantization").string(var_834.as_str());
    }
    if let Some(var_835) = &input.alternate_transfer_function_sei {
        object
            .key("alternateTransferFunctionSei")
            .string(var_835.as_str());
    }
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_836) = &input.codec_level {
        object.key("codecLevel").string(var_836.as_str());
    }
    if let Some(var_837) = &input.codec_profile {
        object.key("codecProfile").string(var_837.as_str());
    }
    if let Some(var_838) = &input.dynamic_sub_gop {
        object.key("dynamicSubGop").string(var_838.as_str());
    }
    if let Some(var_839) = &input.flicker_adaptive_quantization {
        object
            .key("flickerAdaptiveQuantization")
            .string(var_839.as_str());
    }
    if let Some(var_840) = &input.framerate_control {
        object.key("framerateControl").string(var_840.as_str());
    }
    if let Some(var_841) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(var_841.as_str());
    }
    if input.framerate_denominator != 0 {
        object.key("framerateDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_denominator).into()),
        );
    }
    if input.framerate_numerator != 0 {
        object.key("framerateNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_numerator).into()),
        );
    }
    if let Some(var_842) = &input.gop_b_reference {
        object.key("gopBReference").string(var_842.as_str());
    }
    if input.gop_closed_cadence != 0 {
        object.key("gopClosedCadence").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.gop_closed_cadence).into()),
        );
    }
    // gop_size is the only float field; it uses the Float variant.
    if input.gop_size != 0.0 {
        object.key("gopSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.gop_size).into()),
        );
    }
    if let Some(var_843) = &input.gop_size_units {
        object.key("gopSizeUnits").string(var_843.as_str());
    }
    if input.hrd_buffer_initial_fill_percentage != 0 {
        object.key("hrdBufferInitialFillPercentage").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.hrd_buffer_initial_fill_percentage).into()),
        );
    }
    if input.hrd_buffer_size != 0 {
        object.key("hrdBufferSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.hrd_buffer_size).into()),
        );
    }
    if let Some(var_844) = &input.interlace_mode {
        object.key("interlaceMode").string(var_844.as_str());
    }
    if input.max_bitrate != 0 {
        object.key("maxBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_bitrate).into()),
        );
    }
    if input.min_i_interval != 0 {
        object.key("minIInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_i_interval).into()),
        );
    }
    if input.number_b_frames_between_reference_frames != 0 {
        object.key("numberBFramesBetweenReferenceFrames").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(
                (input.number_b_frames_between_reference_frames).into(),
            ),
        );
    }
    if input.number_reference_frames != 0 {
        object.key("numberReferenceFrames").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.number_reference_frames).into()),
        );
    }
    if let Some(var_845) = &input.par_control {
        object.key("parControl").string(var_845.as_str());
    }
    if input.par_denominator != 0 {
        object.key("parDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.par_denominator).into()),
        );
    }
    if input.par_numerator != 0 {
        object.key("parNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.par_numerator).into()),
        );
    }
    if let Some(var_846) = &input.quality_tuning_level {
        object.key("qualityTuningLevel").string(var_846.as_str());
    }
    // Nested QVBR settings are written as a child object.
    if let Some(var_847) = &input.qvbr_settings {
        let mut object_848 = object.key("qvbrSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_h265_qvbr_settings(
            &mut object_848,
            var_847,
        )?;
        object_848.finish();
    }
    if let Some(var_849) = &input.rate_control_mode {
        object.key("rateControlMode").string(var_849.as_str());
    }
    if let Some(var_850) = &input.sample_adaptive_offset_filter_mode {
        object
            .key("sampleAdaptiveOffsetFilterMode")
            .string(var_850.as_str());
    }
    if let Some(var_851) = &input.scan_type_conversion_mode {
        object
            .key("scanTypeConversionMode")
            .string(var_851.as_str());
    }
    if let Some(var_852) = &input.scene_change_detect {
        object.key("sceneChangeDetect").string(var_852.as_str());
    }
    if input.slices != 0 {
        object.key("slices").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.slices).into()),
        );
    }
    if let Some(var_853) = &input.slow_pal {
        object.key("slowPal").string(var_853.as_str());
    }
    if let Some(var_854) = &input.spatial_adaptive_quantization {
        object
            .key("spatialAdaptiveQuantization")
            .string(var_854.as_str());
    }
    if let Some(var_855) = &input.telecine {
        object.key("telecine").string(var_855.as_str());
    }
    if let Some(var_856) = &input.temporal_adaptive_quantization {
        object
            .key("temporalAdaptiveQuantization")
            .string(var_856.as_str());
    }
    if let Some(var_857) = &input.temporal_ids {
        object.key("temporalIds").string(var_857.as_str());
    }
    if let Some(var_858) = &input.tiles {
        object.key("tiles").string(var_858.as_str());
    }
    if let Some(var_859) = &input.unregistered_sei_timecode {
        object
            .key("unregisteredSeiTimecode")
            .string(var_859.as_str());
    }
    if let Some(var_860) = &input.write_mp4_packaging_type {
        object.key("writeMp4PackagingType").string(var_860.as_str());
    }
    Ok(())
}
/// Serializes `Mpeg2Settings` into the given JSON object writer.
///
/// Generated serializer: keys are emitted in the fixed order below; `None`
/// optionals and zero-valued numerics are omitted.
pub fn serialize_structure_crate_model_mpeg2_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Mpeg2Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(var_861) = &input.adaptive_quantization {
        object.key("adaptiveQuantization").string(var_861.as_str());
    }
    if input.bitrate != 0 {
        object.key("bitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.bitrate).into()),
        );
    }
    if let Some(var_862) = &input.codec_level {
        object.key("codecLevel").string(var_862.as_str());
    }
    if let Some(var_863) = &input.codec_profile {
        object.key("codecProfile").string(var_863.as_str());
    }
    if let Some(var_864) = &input.dynamic_sub_gop {
        object.key("dynamicSubGop").string(var_864.as_str());
    }
    if let Some(var_865) = &input.framerate_control {
        object.key("framerateControl").string(var_865.as_str());
    }
    if let Some(var_866) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(var_866.as_str());
    }
    if input.framerate_denominator != 0 {
        object.key("framerateDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_denominator).into()),
        );
    }
    if input.framerate_numerator != 0 {
        object.key("framerateNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.framerate_numerator).into()),
        );
    }
    if input.gop_closed_cadence != 0 {
        object.key("gopClosedCadence").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.gop_closed_cadence).into()),
        );
    }
    // gop_size is the only float field; it uses the Float variant.
    if input.gop_size != 0.0 {
        object.key("gopSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float((input.gop_size).into()),
        );
    }
    if let Some(var_867) = &input.gop_size_units {
        object.key("gopSizeUnits").string(var_867.as_str());
    }
    if input.hrd_buffer_initial_fill_percentage != 0 {
        object.key("hrdBufferInitialFillPercentage").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.hrd_buffer_initial_fill_percentage).into()),
        );
    }
    if input.hrd_buffer_size != 0 {
        object.key("hrdBufferSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.hrd_buffer_size).into()),
        );
    }
    if let Some(var_868) = &input.interlace_mode {
        object.key("interlaceMode").string(var_868.as_str());
    }
    if let Some(var_869) = &input.intra_dc_precision {
        object.key("intraDcPrecision").string(var_869.as_str());
    }
    if input.max_bitrate != 0 {
        object.key("maxBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.max_bitrate).into()),
        );
    }
    if input.min_i_interval != 0 {
        object.key("minIInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.min_i_interval).into()),
        );
    }
    if input.number_b_frames_between_reference_frames != 0 {
        object.key("numberBFramesBetweenReferenceFrames").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(
                (input.number_b_frames_between_reference_frames).into(),
            ),
        );
    }
    if let Some(var_870) = &input.par_control {
        object.key("parControl").string(var_870.as_str());
    }
    if input.par_denominator != 0 {
        object.key("parDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.par_denominator).into()),
        );
    }
    if input.par_numerator != 0 {
        object.key("parNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.par_numerator).into()),
        );
    }
    if let Some(var_871) = &input.quality_tuning_level {
        object.key("qualityTuningLevel").string(var_871.as_str());
    }
    if let Some(var_872) = &input.rate_control_mode {
        object.key("rateControlMode").string(var_872.as_str());
    }
    if let Some(var_873) = &input.scan_type_conversion_mode {
        object
            .key("scanTypeConversionMode")
            .string(var_873.as_str());
    }
    if let Some(var_874) = &input.scene_change_detect {
        object.key("sceneChangeDetect").string(var_874.as_str());
    }
    if let Some(var_875) = &input.slow_pal {
        object.key("slowPal").string(var_875.as_str());
    }
    if input.softness != 0 {
        object.key("softness").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt((input.softness).into()),
        );
    }
    if let Some(var_876) = &input.spatial_adaptive_quantization {
        object
            .key("spatialAdaptiveQuantization")
            .string(var_876.as_str());
    }
    if let Some(var_877) = &input.syntax {
        object.key("syntax").string(var_877.as_str());
    }
    if let Some(var_878) = &input.telecine {
        object.key("telecine").string(var_878.as_str());
    }
    if let Some(var_879) = &input.temporal_adaptive_quantization {
        object
            .key("temporalAdaptiveQuantization")
            .string(var_879.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `ProresSettings` into `object`.
///
/// `None` optionals and zero-valued numerics are skipped; key order is fixed.
pub fn serialize_structure_crate_model_prores_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ProresSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(chroma_sampling) = &input.chroma_sampling {
        object.key("chromaSampling").string(chroma_sampling.as_str());
    }
    if let Some(codec_profile) = &input.codec_profile {
        object.key("codecProfile").string(codec_profile.as_str());
    }
    if let Some(framerate_control) = &input.framerate_control {
        object
            .key("framerateControl")
            .string(framerate_control.as_str());
    }
    if let Some(conversion_algorithm) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(conversion_algorithm.as_str());
    }
    if input.framerate_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let denominator = aws_smithy_types::Number::NegInt(input.framerate_denominator.into());
        object.key("framerateDenominator").number(denominator);
    }
    if input.framerate_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let numerator = aws_smithy_types::Number::NegInt(input.framerate_numerator.into());
        object.key("framerateNumerator").number(numerator);
    }
    if let Some(interlace_mode) = &input.interlace_mode {
        object.key("interlaceMode").string(interlace_mode.as_str());
    }
    if let Some(par_control) = &input.par_control {
        object.key("parControl").string(par_control.as_str());
    }
    if input.par_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let par_denominator = aws_smithy_types::Number::NegInt(input.par_denominator.into());
        object.key("parDenominator").number(par_denominator);
    }
    if input.par_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let par_numerator = aws_smithy_types::Number::NegInt(input.par_numerator.into());
        object.key("parNumerator").number(par_numerator);
    }
    if let Some(scan_mode) = &input.scan_type_conversion_mode {
        object
            .key("scanTypeConversionMode")
            .string(scan_mode.as_str());
    }
    if let Some(slow_pal) = &input.slow_pal {
        object.key("slowPal").string(slow_pal.as_str());
    }
    if let Some(telecine) = &input.telecine {
        object.key("telecine").string(telecine.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `Vc3Settings` into `object`.
///
/// `None` optionals and zero-valued numerics are skipped; key order is fixed.
pub fn serialize_structure_crate_model_vc3_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Vc3Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(framerate_control) = &input.framerate_control {
        object
            .key("framerateControl")
            .string(framerate_control.as_str());
    }
    if let Some(conversion_algorithm) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(conversion_algorithm.as_str());
    }
    if input.framerate_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let denominator = aws_smithy_types::Number::NegInt(input.framerate_denominator.into());
        object.key("framerateDenominator").number(denominator);
    }
    if input.framerate_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let numerator = aws_smithy_types::Number::NegInt(input.framerate_numerator.into());
        object.key("framerateNumerator").number(numerator);
    }
    if let Some(interlace_mode) = &input.interlace_mode {
        object.key("interlaceMode").string(interlace_mode.as_str());
    }
    if let Some(scan_mode) = &input.scan_type_conversion_mode {
        object
            .key("scanTypeConversionMode")
            .string(scan_mode.as_str());
    }
    if let Some(slow_pal) = &input.slow_pal {
        object.key("slowPal").string(slow_pal.as_str());
    }
    if let Some(telecine) = &input.telecine {
        object.key("telecine").string(telecine.as_str());
    }
    if let Some(vc3_class) = &input.vc3_class {
        object.key("vc3Class").string(vc3_class.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `Vp8Settings` into `object`.
///
/// `None` optionals and zero-valued numerics are skipped; key order is fixed.
pub fn serialize_structure_crate_model_vp8_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Vp8Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bitrate != 0 {
        #[allow(clippy::useless_conversion)]
        let bitrate = aws_smithy_types::Number::NegInt(input.bitrate.into());
        object.key("bitrate").number(bitrate);
    }
    if let Some(framerate_control) = &input.framerate_control {
        object
            .key("framerateControl")
            .string(framerate_control.as_str());
    }
    if let Some(conversion_algorithm) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(conversion_algorithm.as_str());
    }
    if input.framerate_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let denominator = aws_smithy_types::Number::NegInt(input.framerate_denominator.into());
        object.key("framerateDenominator").number(denominator);
    }
    if input.framerate_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let numerator = aws_smithy_types::Number::NegInt(input.framerate_numerator.into());
        object.key("framerateNumerator").number(numerator);
    }
    // gop_size is a float; it uses the Float variant rather than NegInt.
    if input.gop_size != 0.0 {
        #[allow(clippy::useless_conversion)]
        let gop_size = aws_smithy_types::Number::Float(input.gop_size.into());
        object.key("gopSize").number(gop_size);
    }
    if input.hrd_buffer_size != 0 {
        #[allow(clippy::useless_conversion)]
        let hrd_buffer_size = aws_smithy_types::Number::NegInt(input.hrd_buffer_size.into());
        object.key("hrdBufferSize").number(hrd_buffer_size);
    }
    if input.max_bitrate != 0 {
        #[allow(clippy::useless_conversion)]
        let max_bitrate = aws_smithy_types::Number::NegInt(input.max_bitrate.into());
        object.key("maxBitrate").number(max_bitrate);
    }
    if let Some(par_control) = &input.par_control {
        object.key("parControl").string(par_control.as_str());
    }
    if input.par_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let par_denominator = aws_smithy_types::Number::NegInt(input.par_denominator.into());
        object.key("parDenominator").number(par_denominator);
    }
    if input.par_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let par_numerator = aws_smithy_types::Number::NegInt(input.par_numerator.into());
        object.key("parNumerator").number(par_numerator);
    }
    if let Some(quality_tuning_level) = &input.quality_tuning_level {
        object
            .key("qualityTuningLevel")
            .string(quality_tuning_level.as_str());
    }
    if let Some(rate_control_mode) = &input.rate_control_mode {
        object.key("rateControlMode").string(rate_control_mode.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `Vp9Settings` into `object`.
///
/// `None` optionals and zero-valued numerics are skipped; key order is fixed.
pub fn serialize_structure_crate_model_vp9_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Vp9Settings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.bitrate != 0 {
        #[allow(clippy::useless_conversion)]
        let bitrate = aws_smithy_types::Number::NegInt(input.bitrate.into());
        object.key("bitrate").number(bitrate);
    }
    if let Some(framerate_control) = &input.framerate_control {
        object
            .key("framerateControl")
            .string(framerate_control.as_str());
    }
    if let Some(conversion_algorithm) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(conversion_algorithm.as_str());
    }
    if input.framerate_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let denominator = aws_smithy_types::Number::NegInt(input.framerate_denominator.into());
        object.key("framerateDenominator").number(denominator);
    }
    if input.framerate_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let numerator = aws_smithy_types::Number::NegInt(input.framerate_numerator.into());
        object.key("framerateNumerator").number(numerator);
    }
    // gop_size is a float; it uses the Float variant rather than NegInt.
    if input.gop_size != 0.0 {
        #[allow(clippy::useless_conversion)]
        let gop_size = aws_smithy_types::Number::Float(input.gop_size.into());
        object.key("gopSize").number(gop_size);
    }
    if input.hrd_buffer_size != 0 {
        #[allow(clippy::useless_conversion)]
        let hrd_buffer_size = aws_smithy_types::Number::NegInt(input.hrd_buffer_size.into());
        object.key("hrdBufferSize").number(hrd_buffer_size);
    }
    if input.max_bitrate != 0 {
        #[allow(clippy::useless_conversion)]
        let max_bitrate = aws_smithy_types::Number::NegInt(input.max_bitrate.into());
        object.key("maxBitrate").number(max_bitrate);
    }
    if let Some(par_control) = &input.par_control {
        object.key("parControl").string(par_control.as_str());
    }
    if input.par_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let par_denominator = aws_smithy_types::Number::NegInt(input.par_denominator.into());
        object.key("parDenominator").number(par_denominator);
    }
    if input.par_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let par_numerator = aws_smithy_types::Number::NegInt(input.par_numerator.into());
        object.key("parNumerator").number(par_numerator);
    }
    if let Some(quality_tuning_level) = &input.quality_tuning_level {
        object
            .key("qualityTuningLevel")
            .string(quality_tuning_level.as_str());
    }
    if let Some(rate_control_mode) = &input.rate_control_mode {
        object.key("rateControlMode").string(rate_control_mode.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `XavcSettings` into `object`.
///
/// Scalar fields come first, followed by the nested per-profile settings
/// structures, each serialized into its own child JSON object.
pub fn serialize_structure_crate_model_xavc_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::XavcSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(adaptive_quantization) = &input.adaptive_quantization {
        object
            .key("adaptiveQuantization")
            .string(adaptive_quantization.as_str());
    }
    if let Some(entropy_encoding) = &input.entropy_encoding {
        object.key("entropyEncoding").string(entropy_encoding.as_str());
    }
    if let Some(framerate_control) = &input.framerate_control {
        object
            .key("framerateControl")
            .string(framerate_control.as_str());
    }
    if let Some(conversion_algorithm) = &input.framerate_conversion_algorithm {
        object
            .key("framerateConversionAlgorithm")
            .string(conversion_algorithm.as_str());
    }
    if input.framerate_denominator != 0 {
        #[allow(clippy::useless_conversion)]
        let denominator = aws_smithy_types::Number::NegInt(input.framerate_denominator.into());
        object.key("framerateDenominator").number(denominator);
    }
    if input.framerate_numerator != 0 {
        #[allow(clippy::useless_conversion)]
        let numerator = aws_smithy_types::Number::NegInt(input.framerate_numerator.into());
        object.key("framerateNumerator").number(numerator);
    }
    if let Some(profile) = &input.profile {
        object.key("profile").string(profile.as_str());
    }
    if let Some(slow_pal) = &input.slow_pal {
        object.key("slowPal").string(slow_pal.as_str());
    }
    if input.softness != 0 {
        #[allow(clippy::useless_conversion)]
        let softness = aws_smithy_types::Number::NegInt(input.softness.into());
        object.key("softness").number(softness);
    }
    if let Some(spatial_aq) = &input.spatial_adaptive_quantization {
        object
            .key("spatialAdaptiveQuantization")
            .string(spatial_aq.as_str());
    }
    if let Some(temporal_aq) = &input.temporal_adaptive_quantization {
        object
            .key("temporalAdaptiveQuantization")
            .string(temporal_aq.as_str());
    }
    if let Some(settings) = &input.xavc4k_intra_cbg_profile_settings {
        let mut nested = object.key("xavc4kIntraCbgProfileSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_xavc4k_intra_cbg_profile_settings(
            &mut nested,
            settings,
        )?;
        nested.finish();
    }
    if let Some(settings) = &input.xavc4k_intra_vbr_profile_settings {
        let mut nested = object.key("xavc4kIntraVbrProfileSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_xavc4k_intra_vbr_profile_settings(
            &mut nested,
            settings,
        )?;
        nested.finish();
    }
    if let Some(settings) = &input.xavc4k_profile_settings {
        let mut nested = object.key("xavc4kProfileSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_xavc4k_profile_settings(
            &mut nested,
            settings,
        )?;
        nested.finish();
    }
    if let Some(settings) = &input.xavc_hd_intra_cbg_profile_settings {
        let mut nested = object.key("xavcHdIntraCbgProfileSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_xavc_hd_intra_cbg_profile_settings(
            &mut nested,
            settings,
        )?;
        nested.finish();
    }
    if let Some(settings) = &input.xavc_hd_profile_settings {
        let mut nested = object.key("xavcHdProfileSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_xavc_hd_profile_settings(
            &mut nested,
            settings,
        )?;
        nested.finish();
    }
    Ok(())
}
/// Writes the JSON representation of `ColorCorrector` into `object`.
///
/// Zero-valued numerics and `None` optionals are omitted from the output.
pub fn serialize_structure_crate_model_color_corrector(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ColorCorrector,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.brightness != 0 {
        #[allow(clippy::useless_conversion)]
        let brightness = aws_smithy_types::Number::NegInt(input.brightness.into());
        object.key("brightness").number(brightness);
    }
    if let Some(color_space_conversion) = &input.color_space_conversion {
        object
            .key("colorSpaceConversion")
            .string(color_space_conversion.as_str());
    }
    if input.contrast != 0 {
        #[allow(clippy::useless_conversion)]
        let contrast = aws_smithy_types::Number::NegInt(input.contrast.into());
        object.key("contrast").number(contrast);
    }
    if let Some(hdr10_metadata) = &input.hdr10_metadata {
        // Nested HDR10 metadata gets its own child object.
        let mut nested = object.key("hdr10Metadata").start_object();
        crate::json_ser::serialize_structure_crate_model_hdr10_metadata(&mut nested, hdr10_metadata)?;
        nested.finish();
    }
    if input.hue != 0 {
        #[allow(clippy::useless_conversion)]
        let hue = aws_smithy_types::Number::NegInt(input.hue.into());
        object.key("hue").number(hue);
    }
    if let Some(sample_range_conversion) = &input.sample_range_conversion {
        object
            .key("sampleRangeConversion")
            .string(sample_range_conversion.as_str());
    }
    if input.saturation != 0 {
        #[allow(clippy::useless_conversion)]
        let saturation = aws_smithy_types::Number::NegInt(input.saturation.into());
        object.key("saturation").number(saturation);
    }
    Ok(())
}
/// Writes the JSON representation of `Deinterlacer` into `object`,
/// emitting only the fields that are set.
pub fn serialize_structure_crate_model_deinterlacer(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Deinterlacer,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(algorithm) = &input.algorithm {
        object.key("algorithm").string(algorithm.as_str());
    }
    if let Some(control) = &input.control {
        object.key("control").string(control.as_str());
    }
    if let Some(mode) = &input.mode {
        object.key("mode").string(mode.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `DolbyVision` into `object`,
/// emitting only the fields that are set.
pub fn serialize_structure_crate_model_dolby_vision(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DolbyVision,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(l6_metadata) = &input.l6_metadata {
        // Nested level-6 metadata gets its own child object.
        let mut nested = object.key("l6Metadata").start_object();
        crate::json_ser::serialize_structure_crate_model_dolby_vision_level6_metadata(
            &mut nested,
            l6_metadata,
        )?;
        nested.finish();
    }
    if let Some(l6_mode) = &input.l6_mode {
        object.key("l6Mode").string(l6_mode.as_str());
    }
    if let Some(mapping) = &input.mapping {
        object.key("mapping").string(mapping.as_str());
    }
    if let Some(profile) = &input.profile {
        object.key("profile").string(profile.as_str());
    }
    Ok(())
}
/// Writes the JSON representation of `Hdr10Plus` into `object`,
/// skipping zero-valued (unset) numeric fields.
pub fn serialize_structure_crate_model_hdr10_plus(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Hdr10Plus,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if input.mastering_monitor_nits != 0 {
        #[allow(clippy::useless_conversion)]
        let mastering_nits = aws_smithy_types::Number::NegInt(input.mastering_monitor_nits.into());
        object.key("masteringMonitorNits").number(mastering_nits);
    }
    if input.target_monitor_nits != 0 {
        #[allow(clippy::useless_conversion)]
        let target_nits = aws_smithy_types::Number::NegInt(input.target_monitor_nits.into());
        object.key("targetMonitorNits").number(target_nits);
    }
    Ok(())
}
/// Writes the JSON representation of `NoiseReducer` into `object`,
/// emitting only the fields that are present. Each nested settings
/// structure is serialized into its own child JSON object.
pub fn serialize_structure_crate_model_noise_reducer(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::NoiseReducer,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(filter) = &input.filter {
        object.key("filter").string(filter.as_str());
    }
    if let Some(filter_settings) = &input.filter_settings {
        let mut nested = object.key("filterSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_noise_reducer_filter_settings(
            &mut nested,
            filter_settings,
        )?;
        nested.finish();
    }
    if let Some(spatial_settings) = &input.spatial_filter_settings {
        let mut nested = object.key("spatialFilterSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_noise_reducer_spatial_filter_settings(
            &mut nested,
            spatial_settings,
        )?;
        nested.finish();
    }
    if let Some(temporal_settings) = &input.temporal_filter_settings {
        let mut nested = object.key("temporalFilterSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_noise_reducer_temporal_filter_settings(
            &mut nested,
            temporal_settings,
        )?;
        nested.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::PartnerWatermarking`] into `object`.
pub fn serialize_structure_crate_model_partner_watermarking(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::PartnerWatermarking,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(nexguard) = input.nexguard_file_marker_settings.as_ref() {
        let mut nexguard_object = object.key("nexguardFileMarkerSettings").start_object();
        crate::json_ser::serialize_structure_crate_model_nex_guard_file_marker_settings(
            &mut nexguard_object,
            nexguard,
        )?;
        nexguard_object.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::TimecodeBurnin`] into `object`; a zero font size
/// is treated as unset.
pub fn serialize_structure_crate_model_timecode_burnin(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TimecodeBurnin,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let font_size = input.font_size;
    if font_size != 0 {
        object.key("fontSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(font_size.into()),
        );
    }
    if let Some(position) = input.position.as_ref() {
        object.key("position").string(position.as_str());
    }
    if let Some(prefix) = input.prefix.as_ref() {
        object.key("prefix").string(prefix.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::AncillarySourceSettings`] into `object`.
pub fn serialize_structure_crate_model_ancillary_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AncillarySourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(convert) = input.convert608_to708.as_ref() {
        object.key("convert608To708").string(convert.as_str());
    }
    let channel_number = input.source_ancillary_channel_number;
    if channel_number != 0 {
        object.key("sourceAncillaryChannelNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(channel_number.into()),
        );
    }
    if let Some(terminate) = input.terminate_captions.as_ref() {
        object.key("terminateCaptions").string(terminate.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::DvbSubSourceSettings`] into `object`; a zero PID
/// is treated as unset and omitted.
pub fn serialize_structure_crate_model_dvb_sub_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DvbSubSourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let pid = input.pid;
    if pid != 0 {
        object.key("pid").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(pid.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::EmbeddedSourceSettings`] into `object`.
pub fn serialize_structure_crate_model_embedded_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::EmbeddedSourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(convert) = input.convert608_to708.as_ref() {
        object.key("convert608To708").string(convert.as_str());
    }
    let channel_number = input.source608_channel_number;
    if channel_number != 0 {
        object.key("source608ChannelNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(channel_number.into()),
        );
    }
    let track_number = input.source608_track_number;
    if track_number != 0 {
        object.key("source608TrackNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(track_number.into()),
        );
    }
    if let Some(terminate) = input.terminate_captions.as_ref() {
        object.key("terminateCaptions").string(terminate.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::FileSourceSettings`] into `object`.
pub fn serialize_structure_crate_model_file_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::FileSourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(convert) = input.convert608_to708.as_ref() {
        object.key("convert608To708").string(convert.as_str());
    }
    if let Some(framerate) = input.framerate.as_ref() {
        let mut framerate_object = object.key("framerate").start_object();
        crate::json_ser::serialize_structure_crate_model_caption_source_framerate(
            &mut framerate_object,
            framerate,
        )?;
        framerate_object.finish();
    }
    if let Some(source_file) = input.source_file.as_ref() {
        object.key("sourceFile").string(source_file.as_str());
    }
    let time_delta = input.time_delta;
    if time_delta != 0 {
        object.key("timeDelta").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(time_delta.into()),
        );
    }
    if let Some(units) = input.time_delta_units.as_ref() {
        object.key("timeDeltaUnits").string(units.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::TeletextSourceSettings`] into `object`.
pub fn serialize_structure_crate_model_teletext_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TeletextSourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(page_number) = input.page_number.as_ref() {
        object.key("pageNumber").string(page_number.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::TrackSourceSettings`] into `object`; a zero track
/// number is treated as unset.
pub fn serialize_structure_crate_model_track_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TrackSourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let track_number = input.track_number;
    if track_number != 0 {
        object.key("trackNumber").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(track_number.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::WebvttHlsSourceSettings`] into `object`.
pub fn serialize_structure_crate_model_webvtt_hls_source_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::WebvttHlsSourceSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(group_id) = input.rendition_group_id.as_ref() {
        object.key("renditionGroupId").string(group_id.as_str());
    }
    if let Some(language_code) = input.rendition_language_code.as_ref() {
        object.key("renditionLanguageCode").string(language_code.as_str());
    }
    if let Some(name) = input.rendition_name.as_ref() {
        object.key("renditionName").string(name.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::AutomatedAbrRule`] into `object`, including the
/// rendition-size lists and bounds that are present.
pub fn serialize_structure_crate_model_automated_abr_rule(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AutomatedAbrRule,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(allowed) = input.allowed_renditions.as_ref() {
        let mut allowed_array = object.key("allowedRenditions").start_array();
        for rendition in allowed {
            let mut rendition_object = allowed_array.value().start_object();
            crate::json_ser::serialize_structure_crate_model_allowed_rendition_size(
                &mut rendition_object,
                rendition,
            )?;
            rendition_object.finish();
        }
        allowed_array.finish();
    }
    if let Some(forced) = input.force_include_renditions.as_ref() {
        let mut forced_array = object.key("forceIncludeRenditions").start_array();
        for rendition in forced {
            let mut rendition_object = forced_array.value().start_object();
            crate::json_ser::serialize_structure_crate_model_force_include_rendition_size(
                &mut rendition_object,
                rendition,
            )?;
            rendition_object.finish();
        }
        forced_array.finish();
    }
    if let Some(min_bottom) = input.min_bottom_rendition_size.as_ref() {
        let mut min_bottom_object = object.key("minBottomRenditionSize").start_object();
        crate::json_ser::serialize_structure_crate_model_min_bottom_rendition_size(
            &mut min_bottom_object,
            min_bottom,
        )?;
        min_bottom_object.finish();
    }
    if let Some(min_top) = input.min_top_rendition_size.as_ref() {
        let mut min_top_object = object.key("minTopRenditionSize").start_object();
        crate::json_ser::serialize_structure_crate_model_min_top_rendition_size(
            &mut min_top_object,
            min_top,
        )?;
        min_top_object.finish();
    }
    // `type` is a Rust keyword, hence the raw identifier on the model field.
    if let Some(rule_type) = input.r#type.as_ref() {
        object.key("type").string(rule_type.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::CmafAdditionalManifest`] into `object`.
pub fn serialize_structure_crate_model_cmaf_additional_manifest(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CmafAdditionalManifest,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(modifier) = input.manifest_name_modifier.as_ref() {
        object.key("manifestNameModifier").string(modifier.as_str());
    }
    if let Some(outputs) = input.selected_outputs.as_ref() {
        let mut outputs_array = object.key("selectedOutputs").start_array();
        for output in outputs {
            outputs_array.value().string(output.as_str());
        }
        outputs_array.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::DestinationSettings`] into `object`.
pub fn serialize_structure_crate_model_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(s3) = input.s3_settings.as_ref() {
        let mut s3_object = object.key("s3Settings").start_object();
        crate::json_ser::serialize_structure_crate_model_s3_destination_settings(
            &mut s3_object,
            s3,
        )?;
        s3_object.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::CmafEncryptionSettings`] into `object`. CMAF uses
/// its own SPEKE provider shape, unlike the DASH/HLS encryption settings.
pub fn serialize_structure_crate_model_cmaf_encryption_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CmafEncryptionSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(iv) = input.constant_initialization_vector.as_ref() {
        object.key("constantInitializationVector").string(iv.as_str());
    }
    if let Some(method) = input.encryption_method.as_ref() {
        object.key("encryptionMethod").string(method.as_str());
    }
    if let Some(iv_in_manifest) = input.initialization_vector_in_manifest.as_ref() {
        object
            .key("initializationVectorInManifest")
            .string(iv_in_manifest.as_str());
    }
    if let Some(speke) = input.speke_key_provider.as_ref() {
        let mut speke_object = object.key("spekeKeyProvider").start_object();
        crate::json_ser::serialize_structure_crate_model_speke_key_provider_cmaf(
            &mut speke_object,
            speke,
        )?;
        speke_object.finish();
    }
    if let Some(static_key) = input.static_key_provider.as_ref() {
        let mut static_key_object = object.key("staticKeyProvider").start_object();
        crate::json_ser::serialize_structure_crate_model_static_key_provider(
            &mut static_key_object,
            static_key,
        )?;
        static_key_object.finish();
    }
    // `type` is a Rust keyword, hence the raw identifier on the model field.
    if let Some(encryption_type) = input.r#type.as_ref() {
        object.key("type").string(encryption_type.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::CmafImageBasedTrickPlaySettings`] into `object`;
/// numeric fields at their zero default are omitted.
pub fn serialize_structure_crate_model_cmaf_image_based_trick_play_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CmafImageBasedTrickPlaySettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(cadence) = input.interval_cadence.as_ref() {
        object.key("intervalCadence").string(cadence.as_str());
    }
    let thumbnail_height = input.thumbnail_height;
    if thumbnail_height != 0 {
        object.key("thumbnailHeight").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(thumbnail_height.into()),
        );
    }
    let thumbnail_interval = input.thumbnail_interval;
    if thumbnail_interval != 0.0 {
        object.key("thumbnailInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float(thumbnail_interval.into()),
        );
    }
    let thumbnail_width = input.thumbnail_width;
    if thumbnail_width != 0 {
        object.key("thumbnailWidth").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(thumbnail_width.into()),
        );
    }
    let tile_height = input.tile_height;
    if tile_height != 0 {
        object.key("tileHeight").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(tile_height.into()),
        );
    }
    let tile_width = input.tile_width;
    if tile_width != 0 {
        object.key("tileWidth").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(tile_width.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::DashAdditionalManifest`] into `object`.
pub fn serialize_structure_crate_model_dash_additional_manifest(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DashAdditionalManifest,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(modifier) = input.manifest_name_modifier.as_ref() {
        object.key("manifestNameModifier").string(modifier.as_str());
    }
    if let Some(outputs) = input.selected_outputs.as_ref() {
        let mut outputs_array = object.key("selectedOutputs").start_array();
        for output in outputs {
            outputs_array.value().string(output.as_str());
        }
        outputs_array.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::DashIsoEncryptionSettings`] into `object`.
pub fn serialize_structure_crate_model_dash_iso_encryption_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DashIsoEncryptionSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(compatibility) = input.playback_device_compatibility.as_ref() {
        object
            .key("playbackDeviceCompatibility")
            .string(compatibility.as_str());
    }
    if let Some(speke) = input.speke_key_provider.as_ref() {
        let mut speke_object = object.key("spekeKeyProvider").start_object();
        crate::json_ser::serialize_structure_crate_model_speke_key_provider(
            &mut speke_object,
            speke,
        )?;
        speke_object.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::DashIsoImageBasedTrickPlaySettings`] into
/// `object`; numeric fields at their zero default are omitted.
pub fn serialize_structure_crate_model_dash_iso_image_based_trick_play_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DashIsoImageBasedTrickPlaySettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(cadence) = input.interval_cadence.as_ref() {
        object.key("intervalCadence").string(cadence.as_str());
    }
    let thumbnail_height = input.thumbnail_height;
    if thumbnail_height != 0 {
        object.key("thumbnailHeight").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(thumbnail_height.into()),
        );
    }
    let thumbnail_interval = input.thumbnail_interval;
    if thumbnail_interval != 0.0 {
        object.key("thumbnailInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float(thumbnail_interval.into()),
        );
    }
    let thumbnail_width = input.thumbnail_width;
    if thumbnail_width != 0 {
        object.key("thumbnailWidth").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(thumbnail_width.into()),
        );
    }
    let tile_height = input.tile_height;
    if tile_height != 0 {
        object.key("tileHeight").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(tile_height.into()),
        );
    }
    let tile_width = input.tile_width;
    if tile_width != 0 {
        object.key("tileWidth").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(tile_width.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::HlsAdditionalManifest`] into `object`.
pub fn serialize_structure_crate_model_hls_additional_manifest(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HlsAdditionalManifest,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(modifier) = input.manifest_name_modifier.as_ref() {
        object.key("manifestNameModifier").string(modifier.as_str());
    }
    if let Some(outputs) = input.selected_outputs.as_ref() {
        let mut outputs_array = object.key("selectedOutputs").start_array();
        for output in outputs {
            outputs_array.value().string(output.as_str());
        }
        outputs_array.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::HlsCaptionLanguageMapping`] into `object`.
pub fn serialize_structure_crate_model_hls_caption_language_mapping(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HlsCaptionLanguageMapping,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let caption_channel = input.caption_channel;
    if caption_channel != 0 {
        object.key("captionChannel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(caption_channel.into()),
        );
    }
    if let Some(custom_code) = input.custom_language_code.as_ref() {
        object.key("customLanguageCode").string(custom_code.as_str());
    }
    if let Some(language_code) = input.language_code.as_ref() {
        object.key("languageCode").string(language_code.as_str());
    }
    if let Some(description) = input.language_description.as_ref() {
        object.key("languageDescription").string(description.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::HlsEncryptionSettings`] into `object`.
pub fn serialize_structure_crate_model_hls_encryption_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HlsEncryptionSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(iv) = input.constant_initialization_vector.as_ref() {
        object.key("constantInitializationVector").string(iv.as_str());
    }
    if let Some(method) = input.encryption_method.as_ref() {
        object.key("encryptionMethod").string(method.as_str());
    }
    if let Some(iv_in_manifest) = input.initialization_vector_in_manifest.as_ref() {
        object
            .key("initializationVectorInManifest")
            .string(iv_in_manifest.as_str());
    }
    if let Some(offline) = input.offline_encrypted.as_ref() {
        object.key("offlineEncrypted").string(offline.as_str());
    }
    if let Some(speke) = input.speke_key_provider.as_ref() {
        let mut speke_object = object.key("spekeKeyProvider").start_object();
        crate::json_ser::serialize_structure_crate_model_speke_key_provider(
            &mut speke_object,
            speke,
        )?;
        speke_object.finish();
    }
    if let Some(static_key) = input.static_key_provider.as_ref() {
        let mut static_key_object = object.key("staticKeyProvider").start_object();
        crate::json_ser::serialize_structure_crate_model_static_key_provider(
            &mut static_key_object,
            static_key,
        )?;
        static_key_object.finish();
    }
    // `type` is a Rust keyword, hence the raw identifier on the model field.
    if let Some(encryption_type) = input.r#type.as_ref() {
        object.key("type").string(encryption_type.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::HlsImageBasedTrickPlaySettings`] into `object`;
/// numeric fields at their zero default are omitted.
pub fn serialize_structure_crate_model_hls_image_based_trick_play_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HlsImageBasedTrickPlaySettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(cadence) = input.interval_cadence.as_ref() {
        object.key("intervalCadence").string(cadence.as_str());
    }
    let thumbnail_height = input.thumbnail_height;
    if thumbnail_height != 0 {
        object.key("thumbnailHeight").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(thumbnail_height.into()),
        );
    }
    let thumbnail_interval = input.thumbnail_interval;
    if thumbnail_interval != 0.0 {
        object.key("thumbnailInterval").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float(thumbnail_interval.into()),
        );
    }
    let thumbnail_width = input.thumbnail_width;
    if thumbnail_width != 0 {
        object.key("thumbnailWidth").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(thumbnail_width.into()),
        );
    }
    let tile_height = input.tile_height;
    if tile_height != 0 {
        object.key("tileHeight").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(tile_height.into()),
        );
    }
    let tile_width = input.tile_width;
    if tile_width != 0 {
        object.key("tileWidth").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(tile_width.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::MsSmoothAdditionalManifest`] into `object`.
pub fn serialize_structure_crate_model_ms_smooth_additional_manifest(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MsSmoothAdditionalManifest,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(modifier) = input.manifest_name_modifier.as_ref() {
        object.key("manifestNameModifier").string(modifier.as_str());
    }
    if let Some(outputs) = input.selected_outputs.as_ref() {
        let mut outputs_array = object.key("selectedOutputs").start_array();
        for output in outputs {
            outputs_array.value().string(output.as_str());
        }
        outputs_array.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::MsSmoothEncryptionSettings`] into `object`.
pub fn serialize_structure_crate_model_ms_smooth_encryption_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MsSmoothEncryptionSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(speke) = input.speke_key_provider.as_ref() {
        let mut speke_object = object.key("spekeKeyProvider").start_object();
        crate::json_ser::serialize_structure_crate_model_speke_key_provider(
            &mut speke_object,
            speke,
        )?;
        speke_object.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::HlsSettings`] into `object`.
pub fn serialize_structure_crate_model_hls_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::HlsSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(group_id) = input.audio_group_id.as_ref() {
        object.key("audioGroupId").string(group_id.as_str());
    }
    if let Some(container) = input.audio_only_container.as_ref() {
        object.key("audioOnlyContainer").string(container.as_str());
    }
    if let Some(rendition_sets) = input.audio_rendition_sets.as_ref() {
        object.key("audioRenditionSets").string(rendition_sets.as_str());
    }
    if let Some(track_type) = input.audio_track_type.as_ref() {
        object.key("audioTrackType").string(track_type.as_str());
    }
    if let Some(dvs_flag) = input.descriptive_video_service_flag.as_ref() {
        object
            .key("descriptiveVideoServiceFlag")
            .string(dvs_flag.as_str());
    }
    if let Some(iframe_only) = input.i_frame_only_manifest.as_ref() {
        object.key("iFrameOnlyManifest").string(iframe_only.as_str());
    }
    if let Some(segment_modifier) = input.segment_modifier.as_ref() {
        object.key("segmentModifier").string(segment_modifier.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::OutputChannelMapping`] into `object`: integer
/// gains go to `inputChannels`, fractional gains to `inputChannelsFineTune`.
pub fn serialize_structure_crate_model_output_channel_mapping(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::OutputChannelMapping,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(channels) = input.input_channels.as_ref() {
        let mut channels_array = object.key("inputChannels").start_array();
        for &channel in channels {
            channels_array.value().number(
                #[allow(clippy::useless_conversion)]
                aws_smithy_types::Number::NegInt(channel.into()),
            );
        }
        channels_array.finish();
    }
    if let Some(fine_tune) = input.input_channels_fine_tune.as_ref() {
        let mut fine_tune_array = object.key("inputChannelsFineTune").start_array();
        for &gain in fine_tune {
            fine_tune_array.value().number(
                #[allow(clippy::useless_conversion)]
                aws_smithy_types::Number::Float(gain.into()),
            );
        }
        fine_tune_array.finish();
    }
    Ok(())
}
/// Serializes [`crate::model::Av1QvbrSettings`] into `object`; zero-valued
/// numeric fields are treated as unset.
pub fn serialize_structure_crate_model_av1_qvbr_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Av1QvbrSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let quality_level = input.qvbr_quality_level;
    if quality_level != 0 {
        object.key("qvbrQualityLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(quality_level.into()),
        );
    }
    let fine_tune = input.qvbr_quality_level_fine_tune;
    if fine_tune != 0.0 {
        object.key("qvbrQualityLevelFineTune").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float(fine_tune.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::AvcIntraUhdSettings`] into `object`.
pub fn serialize_structure_crate_model_avc_intra_uhd_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AvcIntraUhdSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(tuning_level) = input.quality_tuning_level.as_ref() {
        object.key("qualityTuningLevel").string(tuning_level.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::H264QvbrSettings`] into `object`; zero-valued
/// numeric fields are treated as unset.
pub fn serialize_structure_crate_model_h264_qvbr_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::H264QvbrSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let max_average_bitrate = input.max_average_bitrate;
    if max_average_bitrate != 0 {
        object.key("maxAverageBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(max_average_bitrate.into()),
        );
    }
    let quality_level = input.qvbr_quality_level;
    if quality_level != 0 {
        object.key("qvbrQualityLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(quality_level.into()),
        );
    }
    let fine_tune = input.qvbr_quality_level_fine_tune;
    if fine_tune != 0.0 {
        object.key("qvbrQualityLevelFineTune").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float(fine_tune.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::H265QvbrSettings`] into `object`; zero-valued
/// numeric fields are treated as unset.
pub fn serialize_structure_crate_model_h265_qvbr_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::H265QvbrSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let max_average_bitrate = input.max_average_bitrate;
    if max_average_bitrate != 0 {
        object.key("maxAverageBitrate").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(max_average_bitrate.into()),
        );
    }
    let quality_level = input.qvbr_quality_level;
    if quality_level != 0 {
        object.key("qvbrQualityLevel").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(quality_level.into()),
        );
    }
    let fine_tune = input.qvbr_quality_level_fine_tune;
    if fine_tune != 0.0 {
        object.key("qvbrQualityLevelFineTune").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::Float(fine_tune.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::Xavc4kIntraCbgProfileSettings`] into `object`.
pub fn serialize_structure_crate_model_xavc4k_intra_cbg_profile_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Xavc4kIntraCbgProfileSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(xavc_class) = input.xavc_class.as_ref() {
        object.key("xavcClass").string(xavc_class.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::Xavc4kIntraVbrProfileSettings`] into `object`.
pub fn serialize_structure_crate_model_xavc4k_intra_vbr_profile_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Xavc4kIntraVbrProfileSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(xavc_class) = input.xavc_class.as_ref() {
        object.key("xavcClass").string(xavc_class.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::Xavc4kProfileSettings`] into `object`.
pub fn serialize_structure_crate_model_xavc4k_profile_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Xavc4kProfileSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(bitrate_class) = input.bitrate_class.as_ref() {
        object.key("bitrateClass").string(bitrate_class.as_str());
    }
    if let Some(codec_profile) = input.codec_profile.as_ref() {
        object.key("codecProfile").string(codec_profile.as_str());
    }
    if let Some(flicker_aq) = input.flicker_adaptive_quantization.as_ref() {
        object
            .key("flickerAdaptiveQuantization")
            .string(flicker_aq.as_str());
    }
    if let Some(gop_b_reference) = input.gop_b_reference.as_ref() {
        object.key("gopBReference").string(gop_b_reference.as_str());
    }
    let gop_closed_cadence = input.gop_closed_cadence;
    if gop_closed_cadence != 0 {
        object.key("gopClosedCadence").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(gop_closed_cadence.into()),
        );
    }
    let hrd_buffer_size = input.hrd_buffer_size;
    if hrd_buffer_size != 0 {
        object.key("hrdBufferSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(hrd_buffer_size.into()),
        );
    }
    if let Some(tuning_level) = input.quality_tuning_level.as_ref() {
        object.key("qualityTuningLevel").string(tuning_level.as_str());
    }
    let slices = input.slices;
    if slices != 0 {
        object.key("slices").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(slices.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::XavcHdIntraCbgProfileSettings`] into `object`.
pub fn serialize_structure_crate_model_xavc_hd_intra_cbg_profile_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::XavcHdIntraCbgProfileSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(xavc_class) = input.xavc_class.as_ref() {
        object.key("xavcClass").string(xavc_class.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::XavcHdProfileSettings`] into `object`.
pub fn serialize_structure_crate_model_xavc_hd_profile_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::XavcHdProfileSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(bitrate_class) = input.bitrate_class.as_ref() {
        object.key("bitrateClass").string(bitrate_class.as_str());
    }
    if let Some(flicker_aq) = input.flicker_adaptive_quantization.as_ref() {
        object
            .key("flickerAdaptiveQuantization")
            .string(flicker_aq.as_str());
    }
    if let Some(gop_b_reference) = input.gop_b_reference.as_ref() {
        object.key("gopBReference").string(gop_b_reference.as_str());
    }
    let gop_closed_cadence = input.gop_closed_cadence;
    if gop_closed_cadence != 0 {
        object.key("gopClosedCadence").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(gop_closed_cadence.into()),
        );
    }
    let hrd_buffer_size = input.hrd_buffer_size;
    if hrd_buffer_size != 0 {
        object.key("hrdBufferSize").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(hrd_buffer_size.into()),
        );
    }
    if let Some(interlace_mode) = input.interlace_mode.as_ref() {
        object.key("interlaceMode").string(interlace_mode.as_str());
    }
    if let Some(tuning_level) = input.quality_tuning_level.as_ref() {
        object.key("qualityTuningLevel").string(tuning_level.as_str());
    }
    let slices = input.slices;
    if slices != 0 {
        object.key("slices").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(slices.into()),
        );
    }
    if let Some(telecine) = input.telecine.as_ref() {
        object.key("telecine").string(telecine.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::DolbyVisionLevel6Metadata`] into `object`;
/// zero-valued fields are treated as unset.
pub fn serialize_structure_crate_model_dolby_vision_level6_metadata(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DolbyVisionLevel6Metadata,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let max_cll = input.max_cll;
    if max_cll != 0 {
        object.key("maxCll").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(max_cll.into()),
        );
    }
    let max_fall = input.max_fall;
    if max_fall != 0 {
        object.key("maxFall").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(max_fall.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::NoiseReducerFilterSettings`] into `object`.
pub fn serialize_structure_crate_model_noise_reducer_filter_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::NoiseReducerFilterSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let strength = input.strength;
    if strength != 0 {
        object.key("strength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(strength.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::NoiseReducerSpatialFilterSettings`] into
/// `object`; zero-valued fields are treated as unset.
pub fn serialize_structure_crate_model_noise_reducer_spatial_filter_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::NoiseReducerSpatialFilterSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let sharpen_strength = input.post_filter_sharpen_strength;
    if sharpen_strength != 0 {
        object.key("postFilterSharpenStrength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(sharpen_strength.into()),
        );
    }
    let speed = input.speed;
    if speed != 0 {
        object.key("speed").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(speed.into()),
        );
    }
    let strength = input.strength;
    if strength != 0 {
        object.key("strength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(strength.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::NoiseReducerTemporalFilterSettings`] into
/// `object`; zero-valued numeric fields are treated as unset.
pub fn serialize_structure_crate_model_noise_reducer_temporal_filter_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::NoiseReducerTemporalFilterSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let aggressive_mode = input.aggressive_mode;
    if aggressive_mode != 0 {
        object.key("aggressiveMode").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(aggressive_mode.into()),
        );
    }
    if let Some(sharpening) = input.post_temporal_sharpening.as_ref() {
        object
            .key("postTemporalSharpening")
            .string(sharpening.as_str());
    }
    if let Some(sharpening_strength) = input.post_temporal_sharpening_strength.as_ref() {
        object
            .key("postTemporalSharpeningStrength")
            .string(sharpening_strength.as_str());
    }
    let speed = input.speed;
    if speed != 0 {
        object.key("speed").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(speed.into()),
        );
    }
    let strength = input.strength;
    if strength != 0 {
        object.key("strength").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(strength.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::NexGuardFileMarkerSettings`] into `object`.
/// Note: `strength` here is an enum-backed string, unlike the integer
/// `strength` on the noise-reducer settings.
pub fn serialize_structure_crate_model_nex_guard_file_marker_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::NexGuardFileMarkerSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(license) = input.license.as_ref() {
        object.key("license").string(license.as_str());
    }
    let payload = input.payload;
    if payload != 0 {
        object.key("payload").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(payload.into()),
        );
    }
    if let Some(preset) = input.preset.as_ref() {
        object.key("preset").string(preset.as_str());
    }
    if let Some(strength) = input.strength.as_ref() {
        object.key("strength").string(strength.as_str());
    }
    Ok(())
}
/// Serializes [`crate::model::CaptionSourceFramerate`] into `object`;
/// zero-valued numerator/denominator are treated as unset.
pub fn serialize_structure_crate_model_caption_source_framerate(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CaptionSourceFramerate,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let denominator = input.framerate_denominator;
    if denominator != 0 {
        object.key("framerateDenominator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(denominator.into()),
        );
    }
    let numerator = input.framerate_numerator;
    if numerator != 0 {
        object.key("framerateNumerator").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(numerator.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::AllowedRenditionSize`] into `object`.
pub fn serialize_structure_crate_model_allowed_rendition_size(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AllowedRenditionSize,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let height = input.height;
    if height != 0 {
        object.key("height").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(height.into()),
        );
    }
    if let Some(required) = input.required.as_ref() {
        object.key("required").string(required.as_str());
    }
    let width = input.width;
    if width != 0 {
        object.key("width").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(width.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::ForceIncludeRenditionSize`] into `object`.
pub fn serialize_structure_crate_model_force_include_rendition_size(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ForceIncludeRenditionSize,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let height = input.height;
    if height != 0 {
        object.key("height").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(height.into()),
        );
    }
    let width = input.width;
    if width != 0 {
        object.key("width").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(width.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::MinBottomRenditionSize`] into `object`.
pub fn serialize_structure_crate_model_min_bottom_rendition_size(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MinBottomRenditionSize,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let height = input.height;
    if height != 0 {
        object.key("height").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(height.into()),
        );
    }
    let width = input.width;
    if width != 0 {
        object.key("width").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(width.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::MinTopRenditionSize`] into `object`.
pub fn serialize_structure_crate_model_min_top_rendition_size(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::MinTopRenditionSize,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    let height = input.height;
    if height != 0 {
        object.key("height").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(height.into()),
        );
    }
    let width = input.width;
    if width != 0 {
        object.key("width").number(
            #[allow(clippy::useless_conversion)]
            aws_smithy_types::Number::NegInt(width.into()),
        );
    }
    Ok(())
}
/// Serializes [`crate::model::S3DestinationSettings`] into `object`.
pub fn serialize_structure_crate_model_s3_destination_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::S3DestinationSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    if let Some(access_control) = input.access_control.as_ref() {
        let mut access_control_object = object.key("accessControl").start_object();
        crate::json_ser::serialize_structure_crate_model_s3_destination_access_control(
            &mut access_control_object,
            access_control,
        )?;
        access_control_object.finish();
    }
    if let Some(encryption) = input.encryption.as_ref() {
        let mut encryption_object = object.key("encryption").start_object();
        crate::json_ser::serialize_structure_crate_model_s3_encryption_settings(
            &mut encryption_object,
            encryption,
        )?;
        encryption_object.finish();
    }
    Ok(())
}
pub fn serialize_structure_crate_model_speke_key_provider_cmaf(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::SpekeKeyProviderCmaf,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // Optional string members and string arrays, emitted in the fixed
    // key order the service expects.
    if let Some(certificate_arn) = &input.certificate_arn {
        object.key("certificateArn").string(certificate_arn.as_str());
    }
    if let Some(system_ids) = &input.dash_signaled_system_ids {
        let mut array = object.key("dashSignaledSystemIds").start_array();
        for system_id in system_ids {
            array.value().string(system_id.as_str());
        }
        array.finish();
    }
    if let Some(system_ids) = &input.hls_signaled_system_ids {
        let mut array = object.key("hlsSignaledSystemIds").start_array();
        for system_id in system_ids {
            array.value().string(system_id.as_str());
        }
        array.finish();
    }
    if let Some(resource_id) = &input.resource_id {
        object.key("resourceId").string(resource_id.as_str());
    }
    if let Some(url) = &input.url {
        object.key("url").string(url.as_str());
    }
    Ok(())
}
pub fn serialize_structure_crate_model_static_key_provider(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::StaticKeyProvider,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // All four members are optional strings; emit only the ones that are
    // set, preserving the fixed key order on the wire.
    let members = [
        ("keyFormat", &input.key_format),
        ("keyFormatVersions", &input.key_format_versions),
        ("staticKeyValue", &input.static_key_value),
        ("url", &input.url),
    ];
    for &(key, value) in members.iter() {
        if let Some(value) = value {
            object.key(key).string(value.as_str());
        }
    }
    Ok(())
}
pub fn serialize_structure_crate_model_speke_key_provider(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::SpekeKeyProvider,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // Emit the optional members in the fixed key order the service expects.
    if let Some(certificate_arn) = &input.certificate_arn {
        object.key("certificateArn").string(certificate_arn.as_str());
    }
    if let Some(resource_id) = &input.resource_id {
        object.key("resourceId").string(resource_id.as_str());
    }
    if let Some(system_ids) = &input.system_ids {
        let mut array = object.key("systemIds").start_array();
        for system_id in system_ids {
            array.value().string(system_id.as_str());
        }
        array.finish();
    }
    if let Some(url) = &input.url {
        object.key("url").string(url.as_str());
    }
    Ok(())
}
pub fn serialize_structure_crate_model_s3_destination_access_control(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::S3DestinationAccessControl,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // Single optional member: the canned ACL applied to outputs.
    if let Some(canned_acl) = &input.canned_acl {
        object.key("cannedAcl").string(canned_acl.as_str());
    }
    Ok(())
}
pub fn serialize_structure_crate_model_s3_encryption_settings(
    object: &mut aws_smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::S3EncryptionSettings,
) -> Result<(), aws_smithy_http::operation::SerializationError> {
    // Emit only the members that are set, in the fixed wire key order.
    if let Some(encryption_type) = &input.encryption_type {
        object.key("encryptionType").string(encryption_type.as_str());
    }
    if let Some(kms_encryption_context) = &input.kms_encryption_context {
        object.key("kmsEncryptionContext").string(kms_encryption_context.as_str());
    }
    if let Some(kms_key_arn) = &input.kms_key_arn {
        object.key("kmsKeyArn").string(kms_key_arn.as_str());
    }
    Ok(())
}
| 37.929359 | 129 | 0.632354 |
1d70f256b2b433fae64c71c42bd9a16cf8e48008 | 2,906 | use std::{
io::{self, prelude::*},
time::Duration,
};
use crate::{error::*, PositionalResult};
/// Position within a readable source
#[derive(Debug, Clone)]
pub struct ReadPosition {
    // Number of bytes consumed from the underlying reader so far
    pub(crate) byte_offset: u64,
    // Accumulated duration since the start of the stream (advanced via
    // `Reader::add_duration`)
    pub(crate) duration: Duration,
}
impl ReadPosition {
    // Starting position: zero bytes consumed, zero accumulated duration.
    pub(crate) const fn new() -> Self {
        Self {
            byte_offset: 0,
            duration: Duration::ZERO,
        }
    }
    /// The number of bytes that have been consumed
    #[must_use]
    pub const fn byte_offset(&self) -> u64 {
        self.byte_offset
    }
    /// The accumulated duration since the start of the stream
    #[must_use]
    pub const fn duration(&self) -> Duration {
        self.duration
    }
}
/// Wraps a `Read` implementation and tracks how far into the stream reads
/// have progressed, for positional error reporting.
pub struct Reader<'r, T> {
    // Underlying byte source; all reads are delegated here
    reader: &'r mut T,
    // Running position, advanced on every successful read/skip
    position: ReadPosition,
}
impl<'r, T: Read> Reader<'r, T> {
    /// Wraps `reader`, starting the position counters at zero.
    pub fn new(reader: &'r mut T) -> Self {
        Self {
            reader,
            position: ReadPosition::new(),
        }
    }

    /// Reads exactly `buffer.len()` bytes, advancing the byte offset on success.
    fn read_exact(&mut self, buffer: &mut [u8]) -> PositionalResult<()> {
        match self.reader.read_exact(buffer) {
            Ok(()) => {
                self.position.byte_offset += buffer.len() as u64;
                Ok(())
            }
            Err(err) => Err(self.positional_error(err.into())),
        }
    }

    /// Like `read_exact`, but reports an unexpected EOF as `Ok(false)`
    /// instead of an error.
    pub fn try_read_exact_until_eof(&mut self, buffer: &mut [u8]) -> PositionalResult<bool> {
        match self.read_exact(buffer) {
            Ok(()) => Ok(true),
            Err(err) if err.is_unexpected_eof() => Ok(false),
            Err(err) => Err(err),
        }
    }

    /// Skips at most `max_bytes`, returning how many bytes were actually skipped.
    fn skip(&mut self, max_bytes: u64) -> PositionalResult<u64> {
        let copy_result = io::copy(&mut self.reader.take(max_bytes), &mut io::sink());
        match copy_result {
            Ok(num_bytes_skipped) => {
                debug_assert!(num_bytes_skipped <= max_bytes);
                self.position.byte_offset += num_bytes_skipped;
                Ok(num_bytes_skipped)
            }
            Err(err) => Err(self.positional_error(err.into())),
        }
    }

    /// Skips exactly `num_bytes`; hitting EOF first yields `Ok(false)`.
    pub fn try_skip_exact_until_eof(&mut self, num_bytes: u64) -> PositionalResult<bool> {
        match self.skip(num_bytes) {
            Ok(skipped_bytes) => {
                debug_assert!(skipped_bytes <= num_bytes);
                Ok(skipped_bytes == num_bytes)
            }
            Err(err) if err.is_unexpected_eof() => Ok(false),
            Err(err) => Err(err),
        }
    }

    /// Current read position (bytes consumed and accumulated duration).
    pub fn position(&self) -> &ReadPosition {
        &self.position
    }

    /// Adds `duration` to the running duration counter.
    pub fn add_duration(&mut self, duration: Duration) {
        self.position.duration += duration;
    }

    /// Attaches the current position to `source` for error reporting.
    pub fn positional_error(&self, source: Error) -> PositionalError {
        PositionalError {
            source,
            position: self.position.clone(),
        }
    }
}
| 26.18018 | 93 | 0.519959 |
39fc224148e8ed7d8f3403c1e0d044ba4018ca35 | 1,168 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test a case where the associated type binding (to `bool`, in this
// case) is derived from the trait definition. Issue #21636.
// pretty-expanded FIXME #23616
use std::vec;
/// Types that can be consumed as a sequence of booleans.
pub trait BitIter {
    /// Iterator type yielding the individual bits.
    type Iter: Iterator<Item=bool>;
    /// Consumes `self` and returns an iterator over its bits.
    fn bit_iter(self) -> <Self as BitIter>::Iter;
}
impl BitIter for Vec<bool> {
    type Iter = vec::IntoIter<bool>;
    // A `Vec<bool>` already stores its bits directly, so its owning
    // iterator is the bit iterator.
    fn bit_iter(self) -> <Self as BitIter>::Iter {
        IntoIterator::into_iter(self)
    }
}
// Counts how many bits of `arg` are set, via the `BitIter` abstraction.
fn count<T>(arg: T) -> usize
    where T: BitIter
{
    arg.bit_iter().filter(|&bit| bit).count()
}
fn main() {
    // Two of the three bits are set.
    let bits = vec![true, false, true];
    assert_eq!(count(bits), 2);
}
| 24.851064 | 68 | 0.640411 |
396c8cf1a79607f81c9797e85ee5b9b42201fefe | 1,199 | // hashmap1.rs
// A basket of fruits in the form of a hash map needs to be defined.
// The key represents the name of the fruit and the value represents
// how many of that particular fruit is in the basket. You have to put
// at least three different types of fruits (e.g apple, banana, mango)
// in the basket and the total count of all the fruits should be at
// least five.
//
// Make me compile and pass the tests!
//
// Execute the command `rustlings hint hashmap1` if you need
// hints.
use std::collections::HashMap;
// Builds the fruit basket: three fruit kinds mapped to their counts
// (banana: 2, strawberry: 20, apple: 2), satisfying the exercise's
// "at least three kinds, at least five total" requirements.
fn fruit_basket() -> HashMap<String, u32> {
    [("banana", 2), ("strawberry", 20), ("apple", 2)]
        .iter()
        .map(|&(name, count)| (name.to_string(), count))
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    // The basket must offer variety: three or more distinct fruit kinds.
    #[test]
    fn at_least_three_types_of_fruits() {
        let basket = fruit_basket();
        assert!(basket.len() >= 3);
    }
    // The total quantity across all fruits must be at least five.
    #[test]
    fn at_least_five_fruits() {
        let basket = fruit_basket();
        assert!(basket.values().sum::<u32>() >= 5);
    }
}
| 26.644444 | 70 | 0.644704 |
d9c91ee361fc5f503dd144ab2a70dc961639cf18 | 9,853 | #![deny(missing_docs)]
use super::{super::helpers::Arguments, AttributeValidator};
use crate::{ast, diagnostics::DatamodelError, dml, transform::helpers::ValueValidator, IndexDefinition, IndexType};
use std::cmp::Ordering;
use std::collections::HashMap;
/// Prisma's builtin field-level `@unique` attribute.
///
/// Stateless marker type; behaviour lives in its `AttributeValidator<dml::Field>` impl.
pub struct FieldLevelUniqueAttributeValidator {}
impl AttributeValidator<dml::Field> for FieldLevelUniqueAttributeValidator {
    fn attribute_name(&self) -> &'static str {
        // A string literal already has type `&'static str`; the previous
        // `&"unique"` was a needless extra borrow and inconsistent with the
        // model-level validators, which return the bare literal.
        "unique"
    }

    /// Applies `@unique` to a field.
    ///
    /// Rejects the attribute on relation fields (suggesting the underlying
    /// scalar fields instead) and on fields already marked `@id`; otherwise
    /// sets the scalar field's `is_unique` flag.
    fn validate_and_apply(&self, args: &mut Arguments<'_>, obj: &mut dml::Field) -> Result<(), DatamodelError> {
        if let dml::Field::RelationField(rf) = obj {
            // Tailor the suggestion to how many scalar fields back this relation.
            let suggestion = match rf.relation_info.fields.len().cmp(&1) {
                Ordering::Equal => format!(
                    " Did you mean to put it on `{}`?",
                    rf.relation_info.fields.first().unwrap()
                ),
                Ordering::Greater => format!(
                    " Did you mean to provide `@@unique([{}])`?",
                    rf.relation_info.fields.join(", ")
                ),
                // no suggestion possible
                Ordering::Less => String::new(),
            };

            return self.new_attribute_validation_error(
                &format!(
                    "The field `{field_name}` is a relation field and cannot be marked with `{attribute_name}`. Only scalar fields can be made unique.{suggestion}",
                    field_name = rf.name,
                    attribute_name = self.attribute_name(),
                    suggestion = suggestion
                ),
                args.span(),
            );
        } else if let dml::Field::ScalarField(sf) = obj {
            if sf.is_id {
                return self.new_attribute_validation_error(
                    "Fields that are marked as id should not have an additional @unique.",
                    args.span(),
                );
            } else {
                sf.is_unique = true;
            }
        }
        Ok(())
    }

    /// Emits `@unique` back into the AST for scalar fields flagged `is_unique`.
    fn serialize(&self, field: &dml::Field, _datamodel: &dml::Datamodel) -> Vec<ast::Attribute> {
        if let dml::Field::ScalarField(sf) = field {
            if sf.is_unique {
                return vec![ast::Attribute::new(self.attribute_name(), vec![])];
            }
        }
        vec![]
    }
}
/// Prisma's builtin `@@unique` attribute.
pub struct ModelLevelUniqueAttributeValidator {}
impl IndexAttributeBase<dml::Model> for ModelLevelUniqueAttributeValidator {}
impl AttributeValidator<dml::Model> for ModelLevelUniqueAttributeValidator {
    fn attribute_name(&self) -> &str {
        "unique"
    }
    // A model may carry several `@@unique` definitions.
    fn is_duplicate_definition_allowed(&self) -> bool {
        true
    }
    // Validates the attribute via the shared index logic and records the
    // resulting unique index on the model.
    fn validate_and_apply(&self, args: &mut Arguments<'_>, obj: &mut dml::Model) -> Result<(), DatamodelError> {
        let index_def = self.validate_index(args, obj, IndexType::Unique)?;
        obj.indices.push(index_def);
        Ok(())
    }
    fn serialize(&self, model: &dml::Model, _datamodel: &dml::Datamodel) -> Vec<ast::Attribute> {
        self.serialize_index_definitions(&model, IndexType::Unique)
    }
}
/// Prisma's builtin `@@index` attribute.
pub struct ModelLevelIndexAttributeValidator {}
impl IndexAttributeBase<dml::Model> for ModelLevelIndexAttributeValidator {}
impl AttributeValidator<dml::Model> for ModelLevelIndexAttributeValidator {
    fn attribute_name(&self) -> &str {
        "index"
    }
    // A model may carry several `@@index` definitions.
    fn is_duplicate_definition_allowed(&self) -> bool {
        true
    }
    // Validates the attribute via the shared index logic and records the
    // resulting (non-unique) index on the model.
    fn validate_and_apply(&self, args: &mut Arguments<'_>, obj: &mut dml::Model) -> Result<(), DatamodelError> {
        let index_def = self.validate_index(args, obj, IndexType::Normal)?;
        obj.indices.push(index_def);
        Ok(())
    }
    fn serialize(&self, model: &dml::Model, _datamodel: &dml::Datamodel) -> Vec<ast::Attribute> {
        self.serialize_index_definitions(&model, IndexType::Normal)
    }
}
/// common logic for `@@unique` and `@@index`
trait IndexAttributeBase<T>: AttributeValidator<T> {
    // Parses and validates one index attribute into an `IndexDefinition`.
    // Checks, in order: the optional `name` argument, duplicate field
    // references, unknown field references, and references to relation
    // fields (which must be replaced by their underlying scalar fields).
    fn validate_index(
        &self,
        args: &mut Arguments<'_>,
        obj: &mut dml::Model,
        index_type: IndexType,
    ) -> Result<IndexDefinition, DatamodelError> {
        let mut index_def = IndexDefinition {
            name: None,
            fields: vec![],
            tpe: index_type,
        };
        // `name` is optional, but when present it must be a non-empty string.
        match args
            .optional_arg("name")
            .as_ref()
            .and_then(ValueValidator::as_string_literal)
        {
            Some(("", span)) => {
                return Err(DatamodelError::new_attribute_validation_error(
                    "The `name` argument cannot be an empty string.",
                    self.attribute_name(),
                    span,
                ))
            }
            Some((name, _)) => index_def.name = Some(name.to_owned()),
            None => (),
        };
        // The `fields` argument is a list of constant field-name literals.
        let fields = args
            .default_arg("fields")?
            .as_array()
            .iter()
            .map(|f| f.as_constant_literal())
            .collect::<Result<Vec<_>, _>>()?;
        index_def.fields = fields;
        // Reject a field listed more than once in the same index.
        let duplicated_fields = find_duplicates(&index_def.fields);
        if !duplicated_fields.is_empty() {
            return Err(DatamodelError::new_model_validation_error(
                &format!(
                    "The {}index definition refers to the fields {} multiple times.",
                    if index_type == IndexType::Unique { "unique " } else { "" },
                    duplicated_fields.join(", ")
                ),
                &obj.name,
                args.span(),
            ));
        }
        // Fields the model does not define at all.
        let undefined_fields: Vec<String> = index_def
            .fields
            .iter()
            .filter_map(|field| {
                if obj.find_field(&field).is_none() {
                    Some(field.to_string())
                } else {
                    None
                }
            })
            .collect();
        // Fields that exist but are relation fields (not allowed in indexes).
        let referenced_relation_fields: Vec<String> = index_def
            .fields
            .iter()
            .filter(|field| obj.find_relation_field(&field).is_some())
            .map(|f| f.to_owned())
            .collect();
        if !undefined_fields.is_empty() {
            return Err(DatamodelError::new_model_validation_error(
                &format!(
                    "The {}index definition refers to the unknown fields {}.",
                    if index_type == IndexType::Unique { "unique " } else { "" },
                    undefined_fields.join(", ")
                ),
                &obj.name,
                args.span(),
            ));
        }
        if !referenced_relation_fields.is_empty() {
            // Build a corrected field list: relation fields are replaced by
            // their underlying scalar fields; scalar fields are kept as-is.
            let mut suggested_fields = Vec::new();
            let mut had_successful_replacement = false;
            for f in &index_def.fields {
                if let Some(rf) = obj.find_relation_field(&f) {
                    for underlying_field in &rf.relation_info.fields {
                        suggested_fields.push(underlying_field.to_owned());
                        had_successful_replacement = true;
                    }
                }
                if let Some(sf) = obj.find_scalar_field(&f) {
                    suggested_fields.push(sf.name.clone());
                }
            }
            // Only suggest an alternative if at least one replacement was found.
            let suggestion = if had_successful_replacement {
                format!(
                    " Did you mean `@@{attribute_name}([{fields}])`?",
                    attribute_name = attribute_name(index_type),
                    fields = suggested_fields.join(", ")
                )
            } else {
                String::new()
            };
            return Err(DatamodelError::new_model_validation_error(
                &format!(
                    "The {prefix}index definition refers to the relation fields {the_fields}. Index definitions must reference only scalar fields.{suggestion}",
                    prefix = if index_type == IndexType::Unique { "unique " } else { "" },
                    the_fields = referenced_relation_fields.join(", "),
                    suggestion = suggestion
                ),
                &obj.name,
                args.span(),
            ));
        }
        Ok(index_def)
    }
    // Serializes all of the model's index definitions of `index_type` back
    // into AST attributes (`@@unique(...)` / `@@index(...)`).
    fn serialize_index_definitions(&self, model: &dml::Model, index_type: IndexType) -> Vec<ast::Attribute> {
        let attributes: Vec<ast::Attribute> = model
            .indices
            .iter()
            .filter(|index| index.tpe == index_type)
            .map(|index_def| {
                // Unnamed first argument: the field list.
                let mut args = vec![ast::Argument::new_array(
                    "",
                    index_def
                        .fields
                        .iter()
                        .map(|f| ast::Expression::ConstantValue(f.to_string(), ast::Span::empty()))
                        .collect(),
                )];
                if let Some(name) = &index_def.name {
                    args.push(ast::Argument::new_string("name", &name));
                }
                ast::Attribute::new(self.attribute_name(), args)
            })
            .collect();
        attributes
    }
}
// Maps an index type to the attribute keyword it is written as in the schema.
fn attribute_name(index_type: dml::IndexType) -> &'static str {
    match index_type {
        dml::IndexType::Unique => "unique",
        _ => "index",
    }
}
// returns the items that are contained multiple times in the provided vector
//
// The result preserves the order of first occurrence so that error messages
// built from it are deterministic (bare `HashMap` iteration order is not).
fn find_duplicates(items: &[String]) -> Vec<String> {
    // First pass: count occurrences of each item.
    let mut counts: HashMap<&String, usize> = HashMap::new();
    for item in items.iter() {
        *counts.entry(item).or_insert(0) += 1;
    }
    // Second pass: report each duplicated item once, in input order.
    let mut result = Vec::new();
    for item in items.iter() {
        if let Some(&count) = counts.get(item) {
            if count > 1 {
                result.push(item.clone());
                // Drop the entry so the same duplicate is not reported again.
                counts.remove(item);
            }
        }
    }
    result
}
| 33.859107 | 164 | 0.522277 |
abe657473579d147a088ab2baec071742e306532 | 806 | use std::str::FromStr;
/// Initializes the process-wide logger.
///
/// `level` is parsed as a `log::LevelFilter` (e.g. "debug", "info"); invalid
/// values fall back to `Info`. Logging is switched off for all targets except
/// the `rust_tide_backend` crate, and output goes to stdout.
///
/// Panics if a global logger has already been installed (`apply` is unwrapped).
pub fn init(level: &str) {
    fern::Dispatch::new()
        .format(|out, message, record| {
            out.finish(format_args!(
                // NOTE(review): the literal "userId requestId" looks like a
                // placeholder for request-scoped fields — confirm whether it
                // was meant to be filled in from context.
                "{ts} {lvl:<5} [{thread:>25.25}] | userId requestId | {file:<40.40} {msg}",
                ts = chrono::Local::now().format("%Y-%m-%dT%H:%M:%S%.3f"),
                lvl = record.level(),
                thread = std::thread::current().name().unwrap_or("main"),
                file = record.file().unwrap_or("no_file.rs"),
                msg = message
            ))
        })
        // Silence everything by default...
        .level(log::LevelFilter::Off)
        // ...then enable only this crate's own logs at the requested level.
        .level_for(
            "rust_tide_backend",
            log::LevelFilter::from_str(level).unwrap_or(log::LevelFilter::Info),
        )
        .chain(std::io::stdout())
        .apply()
        .unwrap();
}
| 33.583333 | 91 | 0.480149 |
1a252ec4f396020a3103d35fe11f67d8bc83f32e | 1,806 | mod helpers;
use helpers::{new_demo_project, dove, delete_project};
/// $ dove call 'main()'
/// $ dove call 'one_param(true)'
/// $ dove call 'two_params(1,1)'
#[test]
fn test_cmd_dove_call() {
    let project_folder = new_demo_project("project_call").unwrap();

    let cases = [
        ("main", "main()"),
        ("one_param", "one_param(true)"),
        ("two_params", "two_params(1,1)"),
    ];
    for &(name, call) in cases.iter() {
        dove(&["call", call], &project_folder).unwrap();
        // Each successful call must leave a compiled transaction artifact.
        let tx_path = project_folder
            .join("build")
            .join("for_tests")
            .join("transaction")
            .join(format!("{}.mvt", name));
        assert!(tx_path.exists());
    }

    delete_project(&project_folder).unwrap();
}
/// $ dove call 'one_param' -a true
/// $ dove call 'two_params' --args 1 1
#[test]
fn test_cmd_dove_call_with_params() {
    let project_folder = new_demo_project("project_call_with_params").unwrap();

    // Arguments supplied via the short and long flag forms.
    let calls: [&[&str]; 2] = [
        &["call", "one_param", "-a", "true"],
        &["call", "two_params", "--args", "1", "1"],
    ];
    for call in calls.iter() {
        dove(call, &project_folder).unwrap();
    }

    delete_project(&project_folder).unwrap();
}
/// With type
/// $ dove call 'with_type<u8>(1)'
/// $ dove call 'with_type(1)' -t u8
/// $ dove call 'with_type' -a 1 -t u8
#[test]
fn test_cmd_dove_call_with_type() {
    let project_folder = new_demo_project("project_call_with_type").unwrap();

    // Three equivalent ways of supplying the type parameter.
    let calls: [&[&str]; 3] = [
        &["call", "with_type<u8>(1)"],
        &["call", "with_type(1)", "-t", "u8"],
        &["call", "with_type", "-a", "1", "-t", "u8"],
    ];
    for call in calls.iter() {
        dove(call, &project_folder).unwrap();
    }

    delete_project(&project_folder).unwrap();
}
| 27.784615 | 65 | 0.580288 |
f4fb96ea6a6708e35c0cfa46871c22438a6b34db | 9,897 | use arret_runtime::boxed;
use arret_runtime::boxed::prelude::*;
use crate::mir::eval_hir::EvalHirCtx;
use crate::mir::tagset::TypeTagSet;
use crate::mir::value::Value;
use crate::ty;
use crate::ty::record;
use crate::ty::Ty;
/// Compact hint for `RegValue`'s type that can't be captured in its type tags
///
/// To allow type hints to apply to unions, each hint is predicated on the value having the
/// appropriate type. For example, `KnownRecordCons` does not imply that the value is a record, its
/// type tag must be checked first.
///
/// It's possible for multiple `TypeHint`s to be applicable to the same type. However, this is
/// unlikely so only a single type hint will be stored. The choice of type hint in these cases is
/// arbitrary.
#[derive(Debug, Clone, PartialEq)]
pub enum TypeHint {
    /// Record of a known class
    KnownRecordCons(record::ConsId),
    /// List of a known length (element count)
    KnownListLen(usize),
    /// Vector of a known length (element count)
    KnownVectorLen(usize),
    /// No type hint
    None,
}
// Result of searching a type reference for record constructors.
#[derive(PartialEq, Debug)]
enum FoundRecordConses<'a> {
    /// More than one distinct record cons may occur (or the type is unbounded)
    Multi,
    /// Exactly one record cons may occur
    Single(&'a record::ConsId),
    /// The type cannot contain a record
    None,
}
/// Looks for the possible record conses of a type reference
fn find_record_conses_for_ty_ref<M>(ty_ref: &ty::Ref<M>) -> FoundRecordConses<'_>
where
    M: ty::Pm,
{
    match ty_ref.try_to_fixed() {
        // A union's result is the join of its members' results:
        // None ⊔ Single(x) = Single(x); Single(x) ⊔ Single(x) = Single(x);
        // any disagreement (or a Multi member) collapses to Multi.
        Some(Ty::Union(members)) => members
            .iter()
            .map(|member| find_record_conses_for_ty_ref(member))
            .fold(FoundRecordConses::None, |member1, member2| {
                match (member1, member2) {
                    (FoundRecordConses::Multi, _) | (_, FoundRecordConses::Multi) => {
                        FoundRecordConses::Multi
                    }
                    (FoundRecordConses::None, FoundRecordConses::Single(single))
                    | (FoundRecordConses::Single(single), FoundRecordConses::None) => {
                        FoundRecordConses::Single(single)
                    }
                    (FoundRecordConses::Single(single1), FoundRecordConses::Single(single2)) => {
                        if single1 == single2 {
                            FoundRecordConses::Single(single1)
                        } else {
                            FoundRecordConses::Multi
                        }
                    }
                    (FoundRecordConses::None, FoundRecordConses::None) => FoundRecordConses::None,
                }
            }),
        // Record instances and record classes pin down a single cons.
        Some(Ty::Record(instance)) => FoundRecordConses::Single(instance.cons()),
        Some(Ty::RecordClass(cons)) => FoundRecordConses::Single(cons),
        // These could be anything
        None | Some(Ty::Any) | Some(Ty::TopRecord) => FoundRecordConses::Multi,
        Some(_) => FoundRecordConses::None,
    }
}
// Derives the best available `TypeHint` from a type reference: a single known
// record cons wins, then a fixed-length list, then a vector length.
pub fn type_hint_for_ty_ref<M>(ty_ref: &ty::Ref<M>) -> TypeHint
where
    M: ty::Pm,
{
    if let FoundRecordConses::Single(known_record_cons) = find_record_conses_for_ty_ref(ty_ref) {
        return TypeHint::KnownRecordCons(known_record_cons.clone());
    }

    match ty_ref.try_to_fixed() {
        Some(Ty::List(list)) => {
            // Only an exact size (start == end of the size range) is a usable hint.
            let size_range = list.size_range();
            if size_range.start == size_range.end {
                TypeHint::KnownListLen(size_range.start)
            } else {
                TypeHint::None
            }
        }
        Some(Ty::Vector(members)) => TypeHint::KnownVectorLen(members.len()),
        _ => TypeHint::None,
    }
}
// Returns the record cons of `value` when it can be statically determined:
// from a record literal, a constant boxed record, or a reg value's type hint.
pub fn known_record_cons_for_value<'a>(
    ehx: &'a EvalHirCtx,
    value: &'a Value,
) -> Option<&'a record::ConsId> {
    match value {
        Value::Record(cons, _) => Some(cons),
        Value::Const(any_ref) => {
            let record_ref = any_ref.downcast_ref::<boxed::Record>()?;
            Some(
                ehx.cons_for_jit_record_class_id(record_ref.class_id())
                    .expect("unable to lookup record cons for JIT record class ID"),
            )
        }
        Value::Reg(reg_value) => match &reg_value.type_hint {
            TypeHint::KnownRecordCons(cons) => Some(cons),
            _ => None,
        },
        _ => None,
    }
}
// Returns the vector length of `value` when statically known, either from a
// constant boxed vector or from a reg value's type hint.
pub fn known_vector_len_for_value(value: &Value) -> Option<usize> {
    match value {
        Value::Const(any_ref) => {
            let vector_ref = any_ref.downcast_ref::<boxed::Vector>()?;
            Some(vector_ref.len())
        }
        Value::Reg(reg_value) => match reg_value.type_hint {
            TypeHint::KnownVectorLen(known_len) => Some(known_len),
            _ => None,
        },
        _ => None,
    }
}
// Computes the best `TypeHint` for `value`: a known record cons takes
// priority, then a reg value's stored hint, then a constant vector's length.
pub fn type_hint_for_value(ehx: &EvalHirCtx, value: &Value) -> TypeHint {
    if let Some(cons) = known_record_cons_for_value(ehx, value) {
        return TypeHint::KnownRecordCons(cons.clone());
    }

    match value {
        Value::Reg(reg_value) => reg_value.type_hint.clone(),
        Value::Const(any_ref) => match any_ref.downcast_ref::<boxed::Vector>() {
            Some(vector_ref) => TypeHint::KnownVectorLen(vector_ref.len()),
            None => TypeHint::None,
        },
        _ => TypeHint::None,
    }
}
/// Returns a TypeTagSet containing the possible type tags for a given value
pub fn possible_type_tags_for_value(value: &Value) -> TypeTagSet {
    match value {
        Value::Const(any_ref) => any_ref.header().type_tag().into(),
        Value::Record(_, _) => boxed::TypeTag::Record.into(),
        Value::Reg(reg_value) => reg_value.possible_type_tags,
        Value::List(fixed, rest) => {
            if !fixed.is_empty() {
                // Non-empty list
                boxed::TypeTag::Pair.into()
            } else {
                match rest {
                    // The head is empty, so the tags are whatever the tail can be
                    Some(tail) => possible_type_tags_for_value(tail),
                    // Empty list
                    None => boxed::TypeTag::Nil.into(),
                }
            }
        }
        // All callable values are represented as boxed function thunks
        Value::ArretFun(_)
        | Value::RustFun(_)
        | Value::TyPred(_)
        | Value::EqPred
        | Value::RecordCons(_)
        | Value::FieldAccessor(_, _) => boxed::TypeTag::FunThunk.into(),
    }
}
/// Annotates an existing value with Arret type information
///
/// For the majority of values this is a no-op. For this reason this function takes a builder for
/// the Arret type that is only invoked if the type information can be used.
pub fn value_with_arret_ty<F>(
    heap: &mut impl boxed::AsHeap,
    value: Value,
    build_arret_ty: F,
) -> Value
where
    F: FnOnce() -> ty::Ref<ty::Mono>,
{
    match value {
        Value::Reg(reg_value) => {
            use crate::mir::value::from_reg::refine_reg_value_with_arret_ty;

            // Only reg values can exploit the extra type information; build
            // the Arret type lazily here.
            let arret_ty = build_arret_ty();
            refine_reg_value_with_arret_ty(heap, &reg_value, &arret_ty)
        }
        other => other,
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::hir::tvar_bounded_by;
    use crate::source::EMPTY_SPAN;
    use crate::ty::ty_args::TyArgs;
    // Exercises every branch of `find_record_conses_for_ty_ref` using two
    // distinct, field-less record constructors as fixtures.
    #[test]
    fn test_find_record_conses_for_ty_ref() {
        let cons1 = record::Cons::new(
            EMPTY_SPAN,
            "cons1".into(),
            "cons1?".into(),
            None,
            Box::new([]),
        );
        let cons2 = record::Cons::new(
            EMPTY_SPAN,
            "cons2".into(),
            "cons2?".into(),
            None,
            Box::new([]),
        );
        // Both the class type and an instance type of each cons.
        let class1_poly: ty::Ref<ty::Poly> = cons1.clone().into();
        let class2_poly: ty::Ref<ty::Poly> = cons2.clone().into();
        let instance1_poly: ty::Ref<ty::Poly> =
            record::Instance::new(cons1.clone(), TyArgs::empty()).into();
        let instance2_poly: ty::Ref<ty::Poly> =
            record::Instance::new(cons2.clone(), TyArgs::empty()).into();
        // Unit type can't contain a record type
        assert_eq!(
            FoundRecordConses::None,
            find_record_conses_for_ty_ref::<ty::Poly>(&Ty::unit().into())
        );
        // `Any` could contain any record cons
        assert_eq!(
            FoundRecordConses::Multi,
            find_record_conses_for_ty_ref::<ty::Poly>(&Ty::Any.into())
        );
        // `TopRecord` could contain any record cons
        assert_eq!(
            FoundRecordConses::Multi,
            find_record_conses_for_ty_ref::<ty::Poly>(&Ty::TopRecord.into())
        );
        // TVar could contain any record cons
        assert_eq!(
            FoundRecordConses::Multi,
            find_record_conses_for_ty_ref(&tvar_bounded_by(Ty::Any.into()))
        );
        // Class type can have the record cons
        assert_eq!(
            FoundRecordConses::Single(&cons1),
            find_record_conses_for_ty_ref(&class1_poly)
        );
        // Instance type can have the record cons
        assert_eq!(
            FoundRecordConses::Single(&cons2),
            find_record_conses_for_ty_ref(&instance2_poly)
        );
        // Union of class and instance of the same class has the record cons
        assert_eq!(
            FoundRecordConses::Single(&cons1),
            find_record_conses_for_ty_ref(
                &Ty::Union(Box::new([class1_poly, instance1_poly.clone()])).into()
            )
        );
        // Bool + record could only have the record cons
        assert_eq!(
            FoundRecordConses::Single(&cons2),
            find_record_conses_for_ty_ref(
                &Ty::Union(Box::new([Ty::Bool.into(), instance2_poly.clone()])).into()
            )
        );
        // Multiple record types
        assert_eq!(
            FoundRecordConses::Multi,
            find_record_conses_for_ty_ref(
                &Ty::Union(Box::new([class2_poly, instance1_poly])).into()
            )
        );
        // TVar inside a union could be any record type
        assert_eq!(
            FoundRecordConses::Multi,
            find_record_conses_for_ty_ref(
                &Ty::Union(Box::new([tvar_bounded_by(Ty::Any.into()), instance2_poly])).into()
            )
        );
    }
}
9bc69880051559ad754e84785cc84a9d1f2e6d9e | 27,768 | use io;
use std::vec::Vec;
use std::string::String;
use std::borrow::ToOwned;
use super::{
Serialize,
Deserialize,
Error,
VarUint7,
VarUint32,
CountedList,
ImportEntry,
MemoryType,
TableType,
ExportEntry,
GlobalEntry,
Func,
FuncBody,
ElementSegment,
DataSegment,
CountedWriter,
CountedListWriter,
External,
serialize,
};
use super::types::Type;
use super::name_section::NameSection;
use super::reloc_section::RelocSection;
const ENTRIES_BUFFER_LENGTH: usize = 16384;
/// Section in the WebAssembly module.
///
/// Variants correspond to the numeric section ids of the binary format;
/// `Name` and `Reloc` are custom (id 0) sections with well-known names.
#[derive(Debug, Clone, PartialEq)]
pub enum Section {
    /// Section is unparsed.
    Unparsed {
        /// id of the unparsed section
        id: u8,
        /// raw bytes of the unparsed section
        payload: Vec<u8>,
    },
    /// Custom section (`id=0`)
    Custom(CustomSection),
    /// Types section
    Type(TypeSection),
    /// Import section
    Import(ImportSection),
    /// Function signatures section
    Function(FunctionSection),
    /// Table definition section
    Table(TableSection),
    /// Memory definition section
    Memory(MemorySection),
    /// Global entries section
    Global(GlobalSection),
    /// Export definitions
    Export(ExportSection),
    /// Entry reference of the module (index of the start function)
    Start(u32),
    /// Elements section
    Element(ElementSection),
    /// Function bodies section
    Code(CodeSection),
    /// Data definition section
    Data(DataSection),
    /// Name section.
    ///
    /// Note that initially it is not parsed until `parse_names` is called explicitly.
    Name(NameSection),
    /// Relocation section.
    ///
    /// Note that initially it is not parsed until `parse_reloc` is called explicitly.
    /// Also note that currently there are serialization (but not de-serialization)
    /// issues with this section (#198)
    Reloc(RelocSection),
}
impl Deserialize for Section {
    type Error = Error;
    // Reads the one-byte section id, then dispatches to the matching
    // section type's deserializer.
    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        let id = match VarUint7::deserialize(reader) {
            // todo: be more selective detecting no more section
            Err(_) => { return Err(Error::UnexpectedEof); },
            Ok(id) => id,
        };
        Ok(
            match id.into() {
                0 => {
                    Section::Custom(CustomSection::deserialize(reader)?.into())
                },
                1 => {
                    Section::Type(TypeSection::deserialize(reader)?)
                },
                2 => {
                    Section::Import(ImportSection::deserialize(reader)?)
                },
                3 => {
                    Section::Function(FunctionSection::deserialize(reader)?)
                },
                4 => {
                    Section::Table(TableSection::deserialize(reader)?)
                },
                5 => {
                    Section::Memory(MemorySection::deserialize(reader)?)
                },
                6 => {
                    Section::Global(GlobalSection::deserialize(reader)?)
                },
                7 => {
                    Section::Export(ExportSection::deserialize(reader)?)
                },
                8 => {
                    // The start section carries only a single function index;
                    // `SectionReader` enforces the declared payload length.
                    let mut section_reader = SectionReader::new(reader)?;
                    let start_idx = VarUint32::deserialize(&mut section_reader)?;
                    section_reader.close()?;
                    Section::Start(start_idx.into())
                },
                9 => {
                    Section::Element(ElementSection::deserialize(reader)?)
                },
                10 => {
                    Section::Code(CodeSection::deserialize(reader)?)
                },
                11 => {
                    Section::Data(DataSection::deserialize(reader)?)
                },
                invalid_id => {
                    return Err(Error::InvalidSectionId(invalid_id))
                },
            }
        )
    }
}
impl Serialize for Section {
    type Error = Error;
    // Writes the section id byte followed by the section payload.
    // `Name` and `Reloc` are encoded as custom sections (id 0x00).
    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        match self {
            Section::Custom(custom_section) => {
                VarUint7::from(0x00).serialize(writer)?;
                custom_section.serialize(writer)?;
            },
            Section::Unparsed { id, payload } => {
                // Raw bytes are re-emitted verbatim under the stored id.
                VarUint7::from(id).serialize(writer)?;
                writer.write(&payload[..])?;
            },
            Section::Type(type_section) => {
                VarUint7::from(0x01).serialize(writer)?;
                type_section.serialize(writer)?;
            },
            Section::Import(import_section) => {
                VarUint7::from(0x02).serialize(writer)?;
                import_section.serialize(writer)?;
            },
            Section::Function(function_section) => {
                VarUint7::from(0x03).serialize(writer)?;
                function_section.serialize(writer)?;
            },
            Section::Table(table_section) => {
                VarUint7::from(0x04).serialize(writer)?;
                table_section.serialize(writer)?;
            },
            Section::Memory(memory_section) => {
                VarUint7::from(0x05).serialize(writer)?;
                memory_section.serialize(writer)?;
            },
            Section::Global(global_section) => {
                VarUint7::from(0x06).serialize(writer)?;
                global_section.serialize(writer)?;
            },
            Section::Export(export_section) => {
                VarUint7::from(0x07).serialize(writer)?;
                export_section.serialize(writer)?;
            },
            Section::Start(index) => {
                // Length-prefixed payload holding a single function index.
                VarUint7::from(0x08).serialize(writer)?;
                let mut counted_writer = CountedWriter::new(writer);
                VarUint32::from(index).serialize(&mut counted_writer)?;
                counted_writer.done()?;
            },
            Section::Element(element_section) => {
                VarUint7::from(0x09).serialize(writer)?;
                element_section.serialize(writer)?;
            },
            Section::Code(code_section) => {
                VarUint7::from(0x0a).serialize(writer)?;
                code_section.serialize(writer)?;
            },
            Section::Data(data_section) => {
                VarUint7::from(0x0b).serialize(writer)?;
                data_section.serialize(writer)?;
            },
            Section::Name(name_section) => {
                // Wrapped in a custom section named "name".
                VarUint7::from(0x00).serialize(writer)?;
                let custom = CustomSection {
                    name: "name".to_owned(),
                    payload: serialize(name_section)?,
                };
                custom.serialize(writer)?;
            },
            Section::Reloc(reloc_section) => {
                VarUint7::from(0x00).serialize(writer)?;
                reloc_section.serialize(writer)?;
            },
        }
        Ok(())
    }
}
impl Section {
    /// Numeric section id used in the binary encoding.
    ///
    /// All custom-flavoured sections (`Custom`, `Unparsed`, `Name`, `Reloc`)
    /// share the custom-section id `0x00`.
    pub(crate) fn id(&self) -> u8 {
        match *self {
            Section::Custom(_)
            | Section::Unparsed { .. }
            | Section::Name(_)
            | Section::Reloc(_) => 0x00,
            Section::Type(_) => 0x01,
            Section::Import(_) => 0x02,
            Section::Function(_) => 0x03,
            Section::Table(_) => 0x04,
            Section::Memory(_) => 0x05,
            Section::Global(_) => 0x06,
            Section::Export(_) => 0x07,
            Section::Start(_) => 0x08,
            Section::Element(_) => 0x09,
            Section::Code(_) => 0x0a,
            Section::Data(_) => 0x0b,
        }
    }
}
// Buffers a whole length-prefixed section payload and hands out reads from it.
pub(crate) struct SectionReader {
    // In-memory copy of the section payload being parsed
    cursor: io::Cursor<Vec<u8>>,
    // Total payload size; `close` checks the cursor consumed exactly this much
    declared_length: usize,
}
impl SectionReader {
    /// Reads a section's length prefix and buffers its entire payload.
    pub fn new<R: io::Read>(reader: &mut R) -> Result<Self, ::elements::Error> {
        let length = u32::from(VarUint32::deserialize(reader)?) as usize;
        let inner_buffer = buffered_read!(ENTRIES_BUFFER_LENGTH, length, reader);
        let declared_length = inner_buffer.len();
        Ok(SectionReader {
            cursor: io::Cursor::new(inner_buffer),
            declared_length,
        })
    }

    /// Verifies that the whole buffered payload was consumed.
    pub fn close(self) -> Result<(), io::Error> {
        if self.cursor.position() == self.declared_length {
            Ok(())
        } else {
            Err(io::Error::InvalidData)
        }
    }
}
impl io::Read for SectionReader {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<()> {
        // Delegate to the buffered cursor, which advances its own position.
        self.cursor.read(buf)?;
        Ok(())
    }
}
/// Deserializes a counted list of `T` entries from a single section payload,
/// verifying on close that the payload was consumed exactly.
fn read_entries<R: io::Read, T: Deserialize<Error=::elements::Error>>(reader: &mut R)
    -> Result<Vec<T>, ::elements::Error>
{
    let mut payload_reader = SectionReader::new(reader)?;
    let entries = CountedList::<T>::deserialize(&mut payload_reader)?.into_inner();
    payload_reader.close()?;
    Ok(entries)
}
/// Custom section
///
/// A section with id 0, identified by an arbitrary name and carrying an
/// opaque payload that is not interpreted by wasm semantics.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct CustomSection {
    // Identifying name of the custom section.
    name: String,
    // Raw, uninterpreted contents (everything in the payload after the name).
    payload: Vec<u8>,
}
impl CustomSection {
    /// Creates a new custom section with the given name and payload
    pub fn new(name: String, payload: Vec<u8>) -> CustomSection {
        CustomSection { name, payload }
    }
    /// Name of the custom section
    pub fn name(&self) -> &str {
        &self.name
    }
    /// Payload of the custom section
    pub fn payload(&self) -> &[u8] {
        &self.payload
    }
    /// Name of the custom section (mutable)
    pub fn name_mut(&mut self) -> &mut String {
        &mut self.name
    }
    /// Payload of the custom section (mutable)
    pub fn payload_mut(&mut self) -> &mut Vec<u8> {
        &mut self.payload
    }
}
impl Deserialize for CustomSection {
    type Error = Error;

    /// Reads the section length, buffers the whole payload, then splits it
    /// into the leading name string and the remaining raw bytes.
    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        let section_length = u32::from(VarUint32::deserialize(reader)?) as usize;
        let raw = buffered_read!(16384, section_length, reader);
        let mut cursor = io::Cursor::new(&raw[..]);
        let name = String::deserialize(&mut cursor)?;
        // Everything after the name is the opaque payload.
        let payload = raw[cursor.position() as usize..].to_vec();
        Ok(CustomSection { name, payload })
    }
}
impl Serialize for CustomSection {
type Error = Error;
fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
use io::Write;
let mut counted_writer = CountedWriter::new(writer);
self.name.serialize(&mut counted_writer)?;
counted_writer.write(&self.payload[..])?;
counted_writer.done()?;
Ok(())
}
}
/// Section with type declarations
#[derive(Debug, Default, Clone, PartialEq)]
pub struct TypeSection(Vec<Type>);

impl TypeSection {
    /// New type section with provided types
    pub fn with_types(types: Vec<Type>) -> Self {
        TypeSection(types)
    }

    /// List of type declarations
    pub fn types(&self) -> &[Type] {
        &self.0
    }

    /// List of type declarations (mutable)
    pub fn types_mut(&mut self) -> &mut Vec<Type> {
        &mut self.0
    }
}

impl Deserialize for TypeSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of type entries.
        read_entries(reader).map(TypeSection)
    }
}

impl Serialize for TypeSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the entries as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let types = self.0;
        CountedListWriter::<Type, _>(types.len(), types.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Section of the imports definition.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct ImportSection(Vec<ImportEntry>);

impl ImportSection {
    /// New import section with provided entries
    pub fn with_entries(entries: Vec<ImportEntry>) -> Self {
        ImportSection(entries)
    }

    /// List of import entries.
    pub fn entries(&self) -> &[ImportEntry] {
        &self.0
    }

    /// List of import entries (mutable).
    pub fn entries_mut(&mut self) -> &mut Vec<ImportEntry> {
        &mut self.0
    }

    /// Returns number of functions
    pub fn functions(&self) -> usize {
        self.0.iter()
            .filter(|entry| match *entry.external() {
                External::Function(_) => true,
                _ => false,
            })
            .count()
    }

    /// Returns number of globals
    pub fn globals(&self) -> usize {
        self.0.iter()
            .filter(|entry| match *entry.external() {
                External::Global(_) => true,
                _ => false,
            })
            .count()
    }
}

impl Deserialize for ImportSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of import entries.
        read_entries(reader).map(ImportSection)
    }
}

impl Serialize for ImportSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the entries as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let entries = self.0;
        CountedListWriter::<ImportEntry, _>(entries.len(), entries.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Section with function signatures definition.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct FunctionSection(Vec<Func>);

impl FunctionSection {
    /// New function signatures section with provided entries
    pub fn with_entries(entries: Vec<Func>) -> Self {
        FunctionSection(entries)
    }

    /// List of all functions in the section
    pub fn entries(&self) -> &[Func] {
        &self.0
    }

    /// List of all functions in the section, mutable
    pub fn entries_mut(&mut self) -> &mut Vec<Func> {
        &mut self.0
    }
}

impl Deserialize for FunctionSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of type references.
        read_entries(reader).map(FunctionSection)
    }
}

impl Serialize for FunctionSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Each entry is written as just its type reference (varuint32),
        // inside a length-prefixed counted list.
        let mut counted = CountedWriter::new(writer);
        let entries = self.0;
        CountedListWriter::<VarUint32, _>(
            entries.len(),
            entries.into_iter().map(|func| func.type_ref().into()),
        ).serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Section with table definition (currently only one is allowed).
#[derive(Default, Debug, Clone, PartialEq)]
pub struct TableSection(Vec<TableType>);

impl TableSection {
    /// New table section with provided table entries
    pub fn with_entries(entries: Vec<TableType>) -> Self {
        TableSection(entries)
    }

    /// Table entries.
    pub fn entries(&self) -> &[TableType] {
        &self.0
    }

    /// Mutable table entries.
    pub fn entries_mut(&mut self) -> &mut Vec<TableType> {
        &mut self.0
    }
}

impl Deserialize for TableSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of table types.
        read_entries(reader).map(TableSection)
    }
}

impl Serialize for TableSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the entries as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let entries = self.0;
        CountedListWriter::<TableType, _>(entries.len(), entries.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Section with memory definition (currently only one entry is allowed).
#[derive(Default, Debug, Clone, PartialEq)]
pub struct MemorySection(Vec<MemoryType>);

impl MemorySection {
    /// New memory section with memory types
    pub fn with_entries(entries: Vec<MemoryType>) -> Self {
        MemorySection(entries)
    }

    /// List of all memory entries in the section
    pub fn entries(&self) -> &[MemoryType] {
        &self.0
    }

    /// Mutable list of all memory entries in the section
    pub fn entries_mut(&mut self) -> &mut Vec<MemoryType> {
        &mut self.0
    }
}

impl Deserialize for MemorySection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of memory types.
        read_entries(reader).map(MemorySection)
    }
}

impl Serialize for MemorySection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the entries as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let entries = self.0;
        CountedListWriter::<MemoryType, _>(entries.len(), entries.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Globals definition section.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct GlobalSection(Vec<GlobalEntry>);

impl GlobalSection {
    /// New global section from list of global entries
    pub fn with_entries(entries: Vec<GlobalEntry>) -> Self {
        GlobalSection(entries)
    }

    /// List of all global entries in the section
    pub fn entries(&self) -> &[GlobalEntry] {
        &self.0
    }

    /// List of all global entries in the section (mutable)
    pub fn entries_mut(&mut self) -> &mut Vec<GlobalEntry> {
        &mut self.0
    }
}

impl Deserialize for GlobalSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of global entries.
        read_entries(reader).map(GlobalSection)
    }
}

impl Serialize for GlobalSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the entries as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let entries = self.0;
        CountedListWriter::<GlobalEntry, _>(entries.len(), entries.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// List of exports definition.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct ExportSection(Vec<ExportEntry>);

impl ExportSection {
    /// New export section from list of export entries
    pub fn with_entries(entries: Vec<ExportEntry>) -> Self {
        ExportSection(entries)
    }

    /// List of all export entries in the section
    pub fn entries(&self) -> &[ExportEntry] {
        &self.0
    }

    /// List of all export entries in the section (mutable)
    pub fn entries_mut(&mut self) -> &mut Vec<ExportEntry> {
        &mut self.0
    }
}

impl Deserialize for ExportSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of export entries.
        read_entries(reader).map(ExportSection)
    }
}

impl Serialize for ExportSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the entries as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let entries = self.0;
        CountedListWriter::<ExportEntry, _>(entries.len(), entries.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Section with function bodies of the module.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct CodeSection(Vec<FuncBody>);

impl CodeSection {
    /// New code section with specified function bodies
    pub fn with_bodies(bodies: Vec<FuncBody>) -> Self {
        CodeSection(bodies)
    }

    /// All function bodies in the section.
    pub fn bodies(&self) -> &[FuncBody] {
        &self.0
    }

    /// All function bodies in the section, mutable.
    pub fn bodies_mut(&mut self) -> &mut Vec<FuncBody> {
        &mut self.0
    }
}

impl Deserialize for CodeSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of function bodies.
        read_entries(reader).map(CodeSection)
    }
}

impl Serialize for CodeSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the bodies as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let bodies = self.0;
        CountedListWriter::<FuncBody, _>(bodies.len(), bodies.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Element entries section.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct ElementSection(Vec<ElementSegment>);

impl ElementSection {
    /// New elements section
    pub fn with_entries(entries: Vec<ElementSegment>) -> Self {
        ElementSection(entries)
    }

    /// List of all element entries in the section
    pub fn entries(&self) -> &[ElementSegment] {
        &self.0
    }

    /// List of all element entries in the section (mutable)
    pub fn entries_mut(&mut self) -> &mut Vec<ElementSegment> {
        &mut self.0
    }
}

impl Deserialize for ElementSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of element segments.
        read_entries(reader).map(ElementSection)
    }
}

impl Serialize for ElementSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the segments as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let segments = self.0;
        CountedListWriter::<ElementSegment, _>(segments.len(), segments.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
/// Data entries definitions.
#[derive(Default, Debug, Clone, PartialEq)]
pub struct DataSection(Vec<DataSegment>);

impl DataSection {
    /// New data section
    pub fn with_entries(entries: Vec<DataSegment>) -> Self {
        DataSection(entries)
    }

    /// List of all data entries in the section
    pub fn entries(&self) -> &[DataSegment] {
        &self.0
    }

    /// List of all data entries in the section (mutable)
    pub fn entries_mut(&mut self) -> &mut Vec<DataSegment> {
        &mut self.0
    }
}

impl Deserialize for DataSection {
    type Error = Error;

    fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
        // Payload is a length-prefixed counted list of data segments.
        read_entries(reader).map(DataSection)
    }
}

impl Serialize for DataSection {
    type Error = Error;

    fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
        // Emit the segments as a counted list inside a length-prefixed body.
        let mut counted = CountedWriter::new(writer);
        let segments = self.0;
        CountedListWriter::<DataSegment, _>(segments.len(), segments.into_iter().map(Into::into))
            .serialize(&mut counted)?;
        counted.done()?;
        Ok(())
    }
}
// Round-trip and recognition tests for section (de)serialization, mostly
// driven by hand-crafted binary payloads with byte-level comments.
#[cfg(test)]
mod tests {
    use super::super::{
        deserialize_buffer, deserialize_file, ValueType, InitExpr, DataSegment,
        serialize, ElementSegment, Instructions, BlockType, Local, FuncBody,
    };
    use super::{Section, TypeSection, Type, DataSection, ElementSection, CodeSection};
    // Deserializing a real module from disk should surface its import section.
    #[test]
    fn import_section() {
        let module = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
        let mut found = false;
        for section in module.sections() {
            match section {
                &Section::Import(ref import_section) => {
                    assert_eq!(25, import_section.entries().len());
                    found = true
                },
                _ => { }
            }
        }
        assert!(found, "There should be import section in test5.wasm");
    }
    // Hand-crafted function section: section id, padded LEB128 length, 4 entries.
    fn functions_test_payload() -> &'static [u8] {
        &[
            // functions section id
            0x03u8,
            // functions section length
            0x87, 0x80, 0x80, 0x80, 0x0,
            // number of functions
            0x04,
            // type reference 1
            0x01,
            // type reference 2
            0x86, 0x80, 0x00,
            // type reference 3
            0x09,
            // type reference 4
            0x33
        ]
    }
    #[test]
    fn fn_section_detect() {
        let section: Section =
            deserialize_buffer(functions_test_payload()).expect("section to be deserialized");
        match section {
            Section::Function(_) => {},
            _ => {
                panic!("Payload should be recognized as functions section")
            }
        }
    }
    #[test]
    fn fn_section_number() {
        let section: Section =
            deserialize_buffer(functions_test_payload()).expect("section to be deserialized");
        match section {
            Section::Function(fn_section) => {
                assert_eq!(4, fn_section.entries().len(), "There should be 4 functions total");
            },
            _ => {
                // will be caught by the dedicated detection test
            }
        }
    }
    #[test]
    fn fn_section_ref() {
        let section: Section =
            deserialize_buffer(functions_test_payload()).expect("section to be deserialized");
        match section {
            Section::Function(fn_section) => {
                // 0x86 0x80 0x00 is a (padded) LEB128 encoding of 6.
                assert_eq!(6, fn_section.entries()[1].type_ref());
            },
            _ => {
                // will be caught by the dedicated detection test
            }
        }
    }
    // Type section payload without the section id (deserialized as TypeSection directly).
    fn types_test_payload() -> &'static [u8] {
        &[
            // section length
            11,
            // 2 functions
            2,
            // func 1, form =1
            0x60,
            // param_count=1
            1,
            // first param
            0x7e, // i64
            // no return params
            0x00,
            // func 2, form=1
            0x60,
            // param_count=2
            2,
            // first param
            0x7e,
            // second param
            0x7d,
            // return param (is_present, param_type)
            0x01, 0x7e
        ]
    }
    #[test]
    fn type_section_len() {
        let type_section: TypeSection =
            deserialize_buffer(types_test_payload()).expect("type_section be deserialized");
        assert_eq!(type_section.types().len(), 2);
    }
    #[test]
    fn type_section_infer() {
        let type_section: TypeSection =
            deserialize_buffer(types_test_payload()).expect("type_section be deserialized");
        let t1 = match &type_section.types()[1] {
            &Type::Function(ref func_type) => func_type
        };
        assert_eq!(Some(ValueType::I64), t1.return_type());
        assert_eq!(2, t1.params().len());
    }
    // Export section payload with six entries of mixed kinds (functions and a memory).
    fn export_payload() -> &'static [u8] {
        &[
            // section id
            0x07,
            // section length
            28,
            // 6 entries
            6,
            // func "A", index 6
            // [name_len(1-5 bytes), name_bytes(name_len, internal_kind(1byte), internal_index(1-5 bytes)])
            0x01, 0x41, 0x01, 0x86, 0x80, 0x00,
            // func "B", index 8
            0x01, 0x42, 0x01, 0x86, 0x00,
            // func "C", index 7
            0x01, 0x43, 0x01, 0x07,
            // memory "D", index 0
            0x01, 0x44, 0x02, 0x00,
            // func "E", index 1
            0x01, 0x45, 0x01, 0x01,
            // func "F", index 2
            0x01, 0x46, 0x01, 0x02
        ]
    }
    #[test]
    fn export_detect() {
        let section: Section =
            deserialize_buffer(export_payload()).expect("section to be deserialized");
        match section {
            Section::Export(_) => {},
            _ => {
                panic!("Payload should be recognized as export section")
            }
        }
    }
    // Code section payload with a single annotated function body.
    fn code_payload() -> &'static [u8] {
        &[
            // sectionid
            0x0Au8,
            // section length, 32
            0x20,
            // body count
            0x01,
            // body 1, length 30
            0x1E,
            0x01, 0x01, 0x7F, // local i32 (one collection of length one of type i32)
            0x02, 0x7F, // block i32
            0x23, 0x00, // get_global 0
            0x21, 0x01, // set_local 1
            0x23, 0x00, // get_global 0
            0x20, 0x00, // get_local 0
            0x6A, // i32.add
            0x24, 0x00, // set_global 0
            0x23, 0x00, // get_global 0
            0x41, 0x0F, // i32.const 15
            0x6A, // i32.add
            0x41, 0x70, // i32.const -16
            0x71, // i32.and
            0x24, 0x00, // set_global 0
            0x20, 0x01, // get_local 1
            0x0B,
            0x0B,
        ]
    }
    #[test]
    fn code_detect() {
        let section: Section =
            deserialize_buffer(code_payload()).expect("section to be deserialized");
        match section {
            Section::Code(_) => {},
            _ => {
                panic!("Payload should be recognized as a code section")
            }
        }
    }
    // Data section payload with one 16-byte zero-filled segment.
    fn data_payload() -> &'static [u8] {
        &[
            0x0bu8, // section id
            20, // 20 bytes overall
            0x01, // number of segments
            0x00, // index
            0x0b, // just `end` op
            0x10,
            // 16x 0x00
            0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00
        ]
    }
    #[test]
    fn data_section_ser() {
        let data_section = DataSection::with_entries(
            vec![DataSegment::new(0u32, Some(InitExpr::empty()), vec![0u8; 16], false)]
        );
        let buf = serialize(data_section).expect("Data section to be serialized");
        assert_eq!(buf, vec![
            20u8, // 20 bytes overall
            0x01, // number of segments
            0x00, // index
            0x0b, // just `end` op
            16, // value of length 16
            0x00, 0x00, 0x00, 0x00, // 16x 0x00 as in initialization
            0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00
        ]);
    }
    #[test]
    fn data_section_detect() {
        let section: Section =
            deserialize_buffer(data_payload()).expect("section to be deserialized");
        match section {
            Section::Data(_) => {},
            _ => {
                panic!("Payload should be recognized as a data section")
            }
        }
    }
    #[test]
    fn element_section_ser() {
        let element_section = ElementSection::with_entries(
            vec![ElementSegment::new(0u32, Some(InitExpr::empty()), vec![0u32; 4], false)]
        );
        let buf = serialize(element_section).expect("Element section to be serialized");
        assert_eq!(buf, vec![
            08u8, // 8 bytes overall
            0x01, // number of segments
            0x00, // index
            0x0b, // just `end` op
            0x04, // 4 elements
            0x00, 0x00, 0x00, 0x00 // 4x 0x00 as in initialization
        ]);
    }
    #[test]
    fn code_section_ser() {
        use super::super::Instruction::*;
        let code_section = CodeSection::with_bodies(
            vec![
                FuncBody::new(
                    vec![Local::new(1, ValueType::I32)],
                    Instructions::new(vec![
                        Block(BlockType::Value(ValueType::I32)),
                        GetGlobal(0),
                        End,
                        End,
                    ])
                )
            ]);
        let buf = serialize(code_section).expect("Code section to be serialized");
        assert_eq!(buf, vec![
            11u8, // 11 bytes total section size
            0x01, // 1 function
            9, // function #1 total code size
            1, // 1 local variable declaration
            1, // amount of variables
            0x7f, // type of variable (7-bit, -0x01), negative
            0x02, // block
            0x7f, // block return type (7-bit, -0x01), negative
            0x23, 0x00, // get_global(0)
            0x0b, // block end
            0x0b, // function end
        ]);
    }
    // Round-trip: a minimal start section (id 0x08, length 1, index 0) should
    // deserialize and serialize back to the identical bytes.
    #[test]
    fn start_section() {
        let section: Section = deserialize_buffer(&[08u8, 01u8, 00u8]).expect("Start section to deserialize");
        if let Section::Start(_) = section {
        } else {
            panic!("Payload should be a start section");
        }
        let serialized = serialize(section).expect("Start section to successfully serializen");
        assert_eq!(serialized, vec![08u8, 01u8, 00u8]);
    }
}
| 24.336547 | 104 | 0.652046 |