Auto merge of #43345 - matthewhammer:master, r=nikomatsakis
Profile queries This PR implements the "profile queries" debugging feature described here: https://github.com/rust-lang-nursery/rust-forge/blob/master/profile-queries.md In particular, it implements the debugging flag `-Z profile-queries` FYI: This PR is my second attempt at pushing these changes. My original PR required a rebase; I have now done that rebase manually, after messing up with git's "interactive" rebase support. The original (now closed/cancelled) PR is this one: https://github.com/rust-lang/rust/issues/43156 r? @nikomatsakis
This commit is contained in:
commit
2c0558f635
9 changed files with 782 additions and 0 deletions
|
|
@ -12,6 +12,7 @@ use rustc_data_structures::fx::FxHashMap;
|
||||||
use session::config::OutputType;
|
use session::config::OutputType;
|
||||||
use std::cell::{Ref, RefCell};
|
use std::cell::{Ref, RefCell};
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
use util::common::{ProfileQueriesMsg, profq_msg};
|
||||||
|
|
||||||
use super::dep_node::{DepNode, DepKind, WorkProductId};
|
use super::dep_node::{DepNode, DepKind, WorkProductId};
|
||||||
use super::query::DepGraphQuery;
|
use super::query::DepGraphQuery;
|
||||||
|
|
@ -118,7 +119,13 @@ impl DepGraph {
|
||||||
{
|
{
|
||||||
if let Some(ref data) = self.data {
|
if let Some(ref data) = self.data {
|
||||||
data.edges.borrow_mut().push_task(key);
|
data.edges.borrow_mut().push_task(key);
|
||||||
|
if cfg!(debug_assertions) {
|
||||||
|
profq_msg(ProfileQueriesMsg::TaskBegin(key.clone()))
|
||||||
|
};
|
||||||
let result = task(cx, arg);
|
let result = task(cx, arg);
|
||||||
|
if cfg!(debug_assertions) {
|
||||||
|
profq_msg(ProfileQueriesMsg::TaskEnd)
|
||||||
|
};
|
||||||
let dep_node_index = data.edges.borrow_mut().pop_task(key);
|
let dep_node_index = data.edges.borrow_mut().pop_task(key);
|
||||||
(result, dep_node_index)
|
(result, dep_node_index)
|
||||||
} else {
|
} else {
|
||||||
|
|
|
||||||
|
|
@ -995,6 +995,10 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
|
||||||
"dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
|
"dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
|
||||||
query_dep_graph: bool = (false, parse_bool, [UNTRACKED],
|
query_dep_graph: bool = (false, parse_bool, [UNTRACKED],
|
||||||
"enable queries of the dependency graph for regression testing"),
|
"enable queries of the dependency graph for regression testing"),
|
||||||
|
profile_queries: bool = (false, parse_bool, [UNTRACKED],
|
||||||
|
"trace and profile the queries of the incremental compilation framework"),
|
||||||
|
profile_queries_and_keys: bool = (false, parse_bool, [UNTRACKED],
|
||||||
|
"trace and profile the queries and keys of the incremental compilation framework"),
|
||||||
no_analysis: bool = (false, parse_bool, [UNTRACKED],
|
no_analysis: bool = (false, parse_bool, [UNTRACKED],
|
||||||
"parse and expand the source, but run no analysis"),
|
"parse and expand the source, but run no analysis"),
|
||||||
extra_plugins: Vec<String> = (Vec::new(), parse_list, [TRACKED],
|
extra_plugins: Vec<String> = (Vec::new(), parse_list, [TRACKED],
|
||||||
|
|
|
||||||
|
|
@ -389,6 +389,13 @@ impl Session {
|
||||||
}
|
}
|
||||||
pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose }
|
pub fn verbose(&self) -> bool { self.opts.debugging_opts.verbose }
|
||||||
pub fn time_passes(&self) -> bool { self.opts.debugging_opts.time_passes }
|
pub fn time_passes(&self) -> bool { self.opts.debugging_opts.time_passes }
|
||||||
|
pub fn profile_queries(&self) -> bool {
|
||||||
|
self.opts.debugging_opts.profile_queries ||
|
||||||
|
self.opts.debugging_opts.profile_queries_and_keys
|
||||||
|
}
|
||||||
|
pub fn profile_queries_and_keys(&self) -> bool {
|
||||||
|
self.opts.debugging_opts.profile_queries_and_keys
|
||||||
|
}
|
||||||
pub fn count_llvm_insns(&self) -> bool {
|
pub fn count_llvm_insns(&self) -> bool {
|
||||||
self.opts.debugging_opts.count_llvm_insns
|
self.opts.debugging_opts.count_llvm_insns
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -28,6 +28,7 @@ use ty::steal::Steal;
|
||||||
use ty::subst::Substs;
|
use ty::subst::Substs;
|
||||||
use ty::fast_reject::SimplifiedType;
|
use ty::fast_reject::SimplifiedType;
|
||||||
use util::nodemap::{DefIdSet, NodeSet};
|
use util::nodemap::{DefIdSet, NodeSet};
|
||||||
|
use util::common::{profq_msg, ProfileQueriesMsg};
|
||||||
|
|
||||||
use rustc_data_structures::indexed_vec::IndexVec;
|
use rustc_data_structures::indexed_vec::IndexVec;
|
||||||
use rustc_data_structures::fx::FxHashMap;
|
use rustc_data_structures::fx::FxHashMap;
|
||||||
|
|
@ -513,6 +514,29 @@ impl<'tcx> QueryDescription for queries::lint_levels<'tcx> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If enabled, send a message to the profile-queries thread
|
||||||
|
macro_rules! profq_msg {
|
||||||
|
($tcx:expr, $msg:expr) => {
|
||||||
|
if cfg!(debug_assertions) {
|
||||||
|
if $tcx.sess.profile_queries() {
|
||||||
|
profq_msg($msg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If enabled, format a key using its debug string, which can be
|
||||||
|
// expensive to compute (in terms of time).
|
||||||
|
macro_rules! profq_key {
|
||||||
|
($tcx:expr, $key:expr) => {
|
||||||
|
if cfg!(debug_assertions) {
|
||||||
|
if $tcx.sess.profile_queries_and_keys() {
|
||||||
|
Some(format!("{:?}", $key))
|
||||||
|
} else { None }
|
||||||
|
} else { None }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
macro_rules! define_maps {
|
macro_rules! define_maps {
|
||||||
(<$tcx:tt>
|
(<$tcx:tt>
|
||||||
$($(#[$attr:meta])*
|
$($(#[$attr:meta])*
|
||||||
|
|
@ -539,6 +563,12 @@ macro_rules! define_maps {
|
||||||
$($(#[$attr])* $name($K)),*
|
$($(#[$attr])* $name($K)),*
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(bad_style)]
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
pub enum QueryMsg {
|
||||||
|
$($name(Option<String>)),*
|
||||||
|
}
|
||||||
|
|
||||||
impl<$tcx> Query<$tcx> {
|
impl<$tcx> Query<$tcx> {
|
||||||
pub fn describe(&self, tcx: TyCtxt) -> String {
|
pub fn describe(&self, tcx: TyCtxt) -> String {
|
||||||
match *self {
|
match *self {
|
||||||
|
|
@ -581,10 +611,20 @@ macro_rules! define_maps {
|
||||||
key,
|
key,
|
||||||
span);
|
span);
|
||||||
|
|
||||||
|
profq_msg!(tcx,
|
||||||
|
ProfileQueriesMsg::QueryBegin(
|
||||||
|
span.clone(),
|
||||||
|
QueryMsg::$name(profq_key!(tcx, key))
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
if let Some(&(ref result, dep_node_index)) = tcx.maps.$name.borrow().map.get(&key) {
|
if let Some(&(ref result, dep_node_index)) = tcx.maps.$name.borrow().map.get(&key) {
|
||||||
tcx.dep_graph.read_index(dep_node_index);
|
tcx.dep_graph.read_index(dep_node_index);
|
||||||
|
profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
|
||||||
return Ok(f(result));
|
return Ok(f(result));
|
||||||
}
|
}
|
||||||
|
// else, we are going to run the provider:
|
||||||
|
profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin);
|
||||||
|
|
||||||
// FIXME(eddyb) Get more valid Span's on queries.
|
// FIXME(eddyb) Get more valid Span's on queries.
|
||||||
// def_span guard is necessary to prevent a recursive loop,
|
// def_span guard is necessary to prevent a recursive loop,
|
||||||
|
|
@ -612,6 +652,7 @@ macro_rules! define_maps {
|
||||||
tcx.dep_graph.with_task(dep_node, tcx, key, run_provider)
|
tcx.dep_graph.with_task(dep_node, tcx, key, run_provider)
|
||||||
}
|
}
|
||||||
})?;
|
})?;
|
||||||
|
profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd);
|
||||||
|
|
||||||
tcx.dep_graph.read_index(dep_node_index);
|
tcx.dep_graph.read_index(dep_node_index);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -19,6 +19,11 @@ use std::iter::repeat;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::time::{Duration, Instant};
|
use std::time::{Duration, Instant};
|
||||||
|
|
||||||
|
use std::sync::mpsc::{Sender};
|
||||||
|
use syntax_pos::{Span};
|
||||||
|
use ty::maps::{QueryMsg};
|
||||||
|
use dep_graph::{DepNode};
|
||||||
|
|
||||||
// The name of the associated type for `Fn` return types
|
// The name of the associated type for `Fn` return types
|
||||||
pub const FN_OUTPUT_NAME: &'static str = "Output";
|
pub const FN_OUTPUT_NAME: &'static str = "Output";
|
||||||
|
|
||||||
|
|
@ -29,6 +34,72 @@ pub struct ErrorReported;
|
||||||
|
|
||||||
thread_local!(static TIME_DEPTH: Cell<usize> = Cell::new(0));
|
thread_local!(static TIME_DEPTH: Cell<usize> = Cell::new(0));
|
||||||
|
|
||||||
|
/// Initialized for -Z profile-queries
|
||||||
|
thread_local!(static PROFQ_CHAN: RefCell<Option<Sender<ProfileQueriesMsg>>> = RefCell::new(None));
|
||||||
|
|
||||||
|
/// Parameters to the `Dump` variant of type `ProfileQueriesMsg`.
|
||||||
|
#[derive(Clone,Debug)]
|
||||||
|
pub struct ProfQDumpParams {
|
||||||
|
/// A base path for the files we will dump
|
||||||
|
pub path:String,
|
||||||
|
/// To ensure that the compiler waits for us to finish our dumps
|
||||||
|
pub ack:Sender<()>,
|
||||||
|
/// toggle dumping a log file with every `ProfileQueriesMsg`
|
||||||
|
pub dump_profq_msg_log:bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A sequence of these messages induce a trace of query-based incremental compilation.
|
||||||
|
/// FIXME(matthewhammer): Determine whether we should include cycle detection here or not.
|
||||||
|
#[derive(Clone,Debug)]
|
||||||
|
pub enum ProfileQueriesMsg {
|
||||||
|
/// begin a timed pass
|
||||||
|
TimeBegin(String),
|
||||||
|
/// end a timed pass
|
||||||
|
TimeEnd,
|
||||||
|
/// begin a task (see dep_graph::graph::with_task)
|
||||||
|
TaskBegin(DepNode),
|
||||||
|
/// end a task
|
||||||
|
TaskEnd,
|
||||||
|
/// begin a new query
|
||||||
|
QueryBegin(Span, QueryMsg),
|
||||||
|
/// query is satisfied by using an already-known value for the given key
|
||||||
|
CacheHit,
|
||||||
|
/// query requires running a provider; providers may nest, permitting queries to nest.
|
||||||
|
ProviderBegin,
|
||||||
|
/// query is satisfied by a provider terminating with a value
|
||||||
|
ProviderEnd,
|
||||||
|
/// dump a record of the queries to the given path
|
||||||
|
Dump(ProfQDumpParams),
|
||||||
|
/// halt the profiling/monitoring background thread
|
||||||
|
Halt
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, send a message to the profile-queries thread
|
||||||
|
pub fn profq_msg(msg: ProfileQueriesMsg) {
|
||||||
|
PROFQ_CHAN.with(|sender|{
|
||||||
|
if let Some(s) = sender.borrow().as_ref() {
|
||||||
|
s.send(msg).unwrap()
|
||||||
|
} else {
|
||||||
|
// Do nothing.
|
||||||
|
//
|
||||||
|
// FIXME(matthewhammer): Multi-threaded translation phase triggers the panic below.
|
||||||
|
// From backtrace: rustc_trans::back::write::spawn_work::{{closure}}.
|
||||||
|
//
|
||||||
|
// panic!("no channel on which to send profq_msg: {:?}", msg)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set channel for profile queries channel
|
||||||
|
pub fn profq_set_chan(s: Sender<ProfileQueriesMsg>) -> bool {
|
||||||
|
PROFQ_CHAN.with(|chan|{
|
||||||
|
if chan.borrow().is_none() {
|
||||||
|
*chan.borrow_mut() = Some(s);
|
||||||
|
true
|
||||||
|
} else { false }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
/// Read the current depth of `time()` calls. This is used to
|
/// Read the current depth of `time()` calls. This is used to
|
||||||
/// encourage indentation across threads.
|
/// encourage indentation across threads.
|
||||||
pub fn time_depth() -> usize {
|
pub fn time_depth() -> usize {
|
||||||
|
|
@ -53,9 +124,15 @@ pub fn time<T, F>(do_it: bool, what: &str, f: F) -> T where
|
||||||
r
|
r
|
||||||
});
|
});
|
||||||
|
|
||||||
|
if cfg!(debug_assertions) {
|
||||||
|
profq_msg(ProfileQueriesMsg::TimeBegin(what.to_string()))
|
||||||
|
};
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
let rv = f();
|
let rv = f();
|
||||||
let dur = start.elapsed();
|
let dur = start.elapsed();
|
||||||
|
if cfg!(debug_assertions) {
|
||||||
|
profq_msg(ProfileQueriesMsg::TimeEnd)
|
||||||
|
};
|
||||||
|
|
||||||
print_time_passes_entry_internal(what, dur);
|
print_time_passes_entry_internal(what, dur);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -64,6 +64,8 @@ use arena::DroplessArena;
|
||||||
|
|
||||||
use derive_registrar;
|
use derive_registrar;
|
||||||
|
|
||||||
|
use profile;
|
||||||
|
|
||||||
pub fn compile_input(sess: &Session,
|
pub fn compile_input(sess: &Session,
|
||||||
cstore: &CStore,
|
cstore: &CStore,
|
||||||
input: &Input,
|
input: &Input,
|
||||||
|
|
@ -105,6 +107,10 @@ pub fn compile_input(sess: &Session,
|
||||||
sess.abort_if_errors();
|
sess.abort_if_errors();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if sess.profile_queries() {
|
||||||
|
profile::begin();
|
||||||
|
}
|
||||||
|
|
||||||
// We need nested scopes here, because the intermediate results can keep
|
// We need nested scopes here, because the intermediate results can keep
|
||||||
// large chunks of memory alive and we want to free them as soon as
|
// large chunks of memory alive and we want to free them as soon as
|
||||||
// possible to keep the peak memory usage low
|
// possible to keep the peak memory usage low
|
||||||
|
|
@ -537,6 +543,10 @@ pub fn phase_1_parse_input<'a>(control: &CompileController,
|
||||||
-> PResult<'a, ast::Crate> {
|
-> PResult<'a, ast::Crate> {
|
||||||
sess.diagnostic().set_continue_after_error(control.continue_parse_after_error);
|
sess.diagnostic().set_continue_after_error(control.continue_parse_after_error);
|
||||||
|
|
||||||
|
if sess.profile_queries() {
|
||||||
|
profile::begin();
|
||||||
|
}
|
||||||
|
|
||||||
let krate = time(sess.time_passes(), "parsing", || {
|
let krate = time(sess.time_passes(), "parsing", || {
|
||||||
match *input {
|
match *input {
|
||||||
Input::File(ref file) => {
|
Input::File(ref file) => {
|
||||||
|
|
@ -1120,6 +1130,10 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||||
"translation",
|
"translation",
|
||||||
move || trans::trans_crate(tcx, analysis, incremental_hashes_map, output_filenames));
|
move || trans::trans_crate(tcx, analysis, incremental_hashes_map, output_filenames));
|
||||||
|
|
||||||
|
if tcx.sess.profile_queries() {
|
||||||
|
profile::dump("profile_queries".to_string())
|
||||||
|
}
|
||||||
|
|
||||||
translation
|
translation
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -104,6 +104,7 @@ use syntax_pos::{DUMMY_SP, MultiSpan};
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod test;
|
mod test;
|
||||||
|
|
||||||
|
pub mod profile;
|
||||||
pub mod driver;
|
pub mod driver;
|
||||||
pub mod pretty;
|
pub mod pretty;
|
||||||
pub mod target_features;
|
pub mod target_features;
|
||||||
|
|
|
||||||
316
src/librustc_driver/profile/mod.rs
Normal file
316
src/librustc_driver/profile/mod.rs
Normal file
|
|
@ -0,0 +1,316 @@
|
||||||
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||||
|
// file at the top-level directory of this distribution and at
|
||||||
|
// http://rust-lang.org/COPYRIGHT.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||||
|
// option. This file may not be copied, modified, or distributed
|
||||||
|
// except according to those terms.
|
||||||
|
|
||||||
|
use rustc::util::common::{ProfQDumpParams, ProfileQueriesMsg, profq_msg, profq_set_chan};
|
||||||
|
use std::sync::mpsc::{Receiver};
|
||||||
|
use std::io::{Write};
|
||||||
|
use rustc::dep_graph::{DepNode};
|
||||||
|
use std::time::{Duration, Instant};
|
||||||
|
|
||||||
|
pub mod trace;
|
||||||
|
|
||||||
|
/// begin a profile thread, if not already running
|
||||||
|
pub fn begin() {
|
||||||
|
use std::thread;
|
||||||
|
use std::sync::mpsc::{channel};
|
||||||
|
let (tx, rx) = channel();
|
||||||
|
if profq_set_chan(tx) {
|
||||||
|
thread::spawn(move||profile_queries_thread(rx));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// dump files with profiling information to the given base path, and
|
||||||
|
/// wait for this dump to complete.
|
||||||
|
///
|
||||||
|
/// wraps the RPC (send/recv channel logic) of requesting a dump.
|
||||||
|
pub fn dump(path:String) {
|
||||||
|
use std::sync::mpsc::{channel};
|
||||||
|
let (tx, rx) = channel();
|
||||||
|
let params = ProfQDumpParams{
|
||||||
|
path, ack:tx,
|
||||||
|
// FIXME: Add another compiler flag to toggle whether this log
|
||||||
|
// is written; false for now
|
||||||
|
dump_profq_msg_log:true,
|
||||||
|
};
|
||||||
|
profq_msg(ProfileQueriesMsg::Dump(params));
|
||||||
|
let _ = rx.recv().unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
// State for parsing recursive trace structure in separate thread, via messages
|
||||||
|
#[derive(Clone, Eq, PartialEq)]
|
||||||
|
enum ParseState {
|
||||||
|
// No (local) parse state; may be parsing a tree, focused on a
|
||||||
|
// sub-tree that could be anything.
|
||||||
|
Clear,
|
||||||
|
// Have Query information from the last message
|
||||||
|
HaveQuery(trace::Query, Instant),
|
||||||
|
// Have "time-begin" information from the last message (doit flag, and message)
|
||||||
|
HaveTimeBegin(String, Instant),
|
||||||
|
// Have "task-begin" information from the last message
|
||||||
|
HaveTaskBegin(DepNode, Instant),
|
||||||
|
}
|
||||||
|
struct StackFrame {
|
||||||
|
pub parse_st: ParseState,
|
||||||
|
pub traces: Vec<trace::Rec>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn total_duration(traces: &Vec<trace::Rec>) -> Duration {
|
||||||
|
let mut sum : Duration = Duration::new(0,0);
|
||||||
|
for t in traces.iter() { sum += t.dur_total; }
|
||||||
|
return sum
|
||||||
|
}
|
||||||
|
|
||||||
|
// profiling thread; retains state (in local variables) and dump traces, upon request.
|
||||||
|
fn profile_queries_thread(r:Receiver<ProfileQueriesMsg>) {
|
||||||
|
use self::trace::*;
|
||||||
|
use std::fs::File;
|
||||||
|
use std::time::{Instant};
|
||||||
|
|
||||||
|
let mut profq_msgs : Vec<ProfileQueriesMsg> = vec![];
|
||||||
|
let mut frame : StackFrame = StackFrame{ parse_st:ParseState::Clear, traces:vec![] };
|
||||||
|
let mut stack : Vec<StackFrame> = vec![];
|
||||||
|
loop {
|
||||||
|
let msg = r.recv();
|
||||||
|
if let Err(_recv_err) = msg {
|
||||||
|
// FIXME: Perhaps do something smarter than simply quitting?
|
||||||
|
break
|
||||||
|
};
|
||||||
|
let msg = msg.unwrap();
|
||||||
|
debug!("profile_queries_thread: {:?}", msg);
|
||||||
|
|
||||||
|
// Meta-level versus _actual_ queries messages
|
||||||
|
match msg {
|
||||||
|
ProfileQueriesMsg::Halt => return,
|
||||||
|
ProfileQueriesMsg::Dump(params) => {
|
||||||
|
assert!(stack.len() == 0);
|
||||||
|
assert!(frame.parse_st == ParseState::Clear);
|
||||||
|
{
|
||||||
|
// write log of all messages
|
||||||
|
if params.dump_profq_msg_log {
|
||||||
|
let mut log_file =
|
||||||
|
File::create(format!("{}.log.txt", params.path)).unwrap();
|
||||||
|
for m in profq_msgs.iter() {
|
||||||
|
writeln!(&mut log_file, "{:?}", m).unwrap()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// write HTML file, and counts file
|
||||||
|
let html_path = format!("{}.html", params.path);
|
||||||
|
let mut html_file = File::create(&html_path).unwrap();
|
||||||
|
|
||||||
|
let counts_path = format!("{}.counts.txt", params.path);
|
||||||
|
let mut counts_file = File::create(&counts_path).unwrap();
|
||||||
|
|
||||||
|
write!(html_file, "<html>\n").unwrap();
|
||||||
|
write!(html_file,
|
||||||
|
"<head>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n",
|
||||||
|
"profile_queries.css").unwrap();
|
||||||
|
write!(html_file, "<style>\n").unwrap();
|
||||||
|
trace::write_style(&mut html_file);
|
||||||
|
write!(html_file, "</style>\n").unwrap();
|
||||||
|
write!(html_file, "</head>\n").unwrap();
|
||||||
|
write!(html_file, "<body>\n").unwrap();
|
||||||
|
trace::write_traces(&mut html_file, &mut counts_file, &frame.traces);
|
||||||
|
write!(html_file, "</body>\n</html>\n").unwrap();
|
||||||
|
|
||||||
|
let ack_path = format!("{}.ack", params.path);
|
||||||
|
let ack_file = File::create(&ack_path).unwrap();
|
||||||
|
drop(ack_file);
|
||||||
|
|
||||||
|
// Tell main thread that we are done, e.g., so it can exit
|
||||||
|
params.ack.send(()).unwrap();
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Actual query message:
|
||||||
|
msg => {
|
||||||
|
// Record msg in our log
|
||||||
|
profq_msgs.push(msg.clone());
|
||||||
|
// Respond to the message, knowing that we've already handled Halt and Dump, above.
|
||||||
|
match (frame.parse_st.clone(), msg) {
|
||||||
|
(_,ProfileQueriesMsg::Halt) => unreachable!(),
|
||||||
|
(_,ProfileQueriesMsg::Dump(_)) => unreachable!(),
|
||||||
|
|
||||||
|
// Parse State: Clear
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::QueryBegin(span,querymsg)) => {
|
||||||
|
let start = Instant::now();
|
||||||
|
frame.parse_st = ParseState::HaveQuery
|
||||||
|
(Query{span:span, msg:querymsg}, start)
|
||||||
|
},
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::CacheHit) => {
|
||||||
|
panic!("parse error: unexpected CacheHit; expected QueryBegin")
|
||||||
|
},
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::ProviderBegin) => {
|
||||||
|
panic!("parse error: expected QueryBegin before beginning a provider")
|
||||||
|
},
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::ProviderEnd) => {
|
||||||
|
let provider_extent = frame.traces;
|
||||||
|
match stack.pop() {
|
||||||
|
None =>
|
||||||
|
panic!("parse error: expected a stack frame; found an empty stack"),
|
||||||
|
Some(old_frame) => {
|
||||||
|
match old_frame.parse_st {
|
||||||
|
ParseState::HaveQuery(q, start) => {
|
||||||
|
let duration = start.elapsed();
|
||||||
|
frame = StackFrame{
|
||||||
|
parse_st:ParseState::Clear,
|
||||||
|
traces:old_frame.traces
|
||||||
|
};
|
||||||
|
let dur_extent = total_duration(&provider_extent);
|
||||||
|
let trace = Rec {
|
||||||
|
effect: Effect::QueryBegin(q, CacheCase::Miss),
|
||||||
|
extent: Box::new(provider_extent),
|
||||||
|
start: start,
|
||||||
|
dur_self: duration - dur_extent,
|
||||||
|
dur_total: duration,
|
||||||
|
};
|
||||||
|
frame.traces.push( trace );
|
||||||
|
},
|
||||||
|
_ => panic!("internal parse error: malformed parse stack")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::TimeBegin(msg)) => {
|
||||||
|
let start = Instant::now();
|
||||||
|
frame.parse_st = ParseState::HaveTimeBegin(msg, start);
|
||||||
|
stack.push(frame);
|
||||||
|
frame = StackFrame{parse_st:ParseState::Clear, traces:vec![]};
|
||||||
|
},
|
||||||
|
(_, ProfileQueriesMsg::TimeBegin(_)) =>
|
||||||
|
panic!("parse error; did not expect time begin here"),
|
||||||
|
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::TimeEnd) => {
|
||||||
|
let provider_extent = frame.traces;
|
||||||
|
match stack.pop() {
|
||||||
|
None =>
|
||||||
|
panic!("parse error: expected a stack frame; found an empty stack"),
|
||||||
|
Some(old_frame) => {
|
||||||
|
match old_frame.parse_st {
|
||||||
|
ParseState::HaveTimeBegin(msg, start) => {
|
||||||
|
let duration = start.elapsed();
|
||||||
|
frame = StackFrame{
|
||||||
|
parse_st:ParseState::Clear,
|
||||||
|
traces:old_frame.traces
|
||||||
|
};
|
||||||
|
let dur_extent = total_duration(&provider_extent);
|
||||||
|
let trace = Rec {
|
||||||
|
effect: Effect::TimeBegin(msg),
|
||||||
|
extent: Box::new(provider_extent),
|
||||||
|
start: start,
|
||||||
|
dur_total: duration,
|
||||||
|
dur_self: duration - dur_extent,
|
||||||
|
};
|
||||||
|
frame.traces.push( trace );
|
||||||
|
},
|
||||||
|
_ => panic!("internal parse error: malformed parse stack")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
(_, ProfileQueriesMsg::TimeEnd) => { panic!("parse error") }
|
||||||
|
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::TaskBegin(key)) => {
|
||||||
|
let start = Instant::now();
|
||||||
|
frame.parse_st = ParseState::HaveTaskBegin(key, start);
|
||||||
|
stack.push(frame);
|
||||||
|
frame = StackFrame{parse_st:ParseState::Clear, traces:vec![]};
|
||||||
|
},
|
||||||
|
(_, ProfileQueriesMsg::TaskBegin(_)) =>
|
||||||
|
panic!("parse error; did not expect time begin here"),
|
||||||
|
|
||||||
|
(ParseState::Clear,
|
||||||
|
ProfileQueriesMsg::TaskEnd) => {
|
||||||
|
let provider_extent = frame.traces;
|
||||||
|
match stack.pop() {
|
||||||
|
None =>
|
||||||
|
panic!("parse error: expected a stack frame; found an empty stack"),
|
||||||
|
Some(old_frame) => {
|
||||||
|
match old_frame.parse_st {
|
||||||
|
ParseState::HaveTaskBegin(key, start) => {
|
||||||
|
let duration = start.elapsed();
|
||||||
|
frame = StackFrame{
|
||||||
|
parse_st:ParseState::Clear,
|
||||||
|
traces:old_frame.traces
|
||||||
|
};
|
||||||
|
let dur_extent = total_duration(&provider_extent);
|
||||||
|
let trace = Rec {
|
||||||
|
effect: Effect::TaskBegin(key),
|
||||||
|
extent: Box::new(provider_extent),
|
||||||
|
start: start,
|
||||||
|
dur_total: duration,
|
||||||
|
dur_self: duration - dur_extent,
|
||||||
|
};
|
||||||
|
frame.traces.push( trace );
|
||||||
|
},
|
||||||
|
_ => panic!("internal parse error: malformed parse stack")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
(_, ProfileQueriesMsg::TaskEnd) => { panic!("parse error") }
|
||||||
|
|
||||||
|
// Parse State: HaveQuery
|
||||||
|
(ParseState::HaveQuery(q,start),
|
||||||
|
ProfileQueriesMsg::CacheHit) => {
|
||||||
|
let duration = start.elapsed();
|
||||||
|
let trace : Rec = Rec{
|
||||||
|
effect: Effect::QueryBegin(q, CacheCase::Hit),
|
||||||
|
extent: Box::new(vec![]),
|
||||||
|
start: start,
|
||||||
|
dur_self: duration,
|
||||||
|
dur_total: duration,
|
||||||
|
};
|
||||||
|
frame.traces.push( trace );
|
||||||
|
frame.parse_st = ParseState::Clear;
|
||||||
|
},
|
||||||
|
(ParseState::HaveQuery(_,_),
|
||||||
|
ProfileQueriesMsg::ProviderBegin) => {
|
||||||
|
stack.push(frame);
|
||||||
|
frame = StackFrame{parse_st:ParseState::Clear, traces:vec![]};
|
||||||
|
},
|
||||||
|
|
||||||
|
//
|
||||||
|
//
|
||||||
|
// Parse errors:
|
||||||
|
|
||||||
|
(ParseState::HaveQuery(q,_),
|
||||||
|
ProfileQueriesMsg::ProviderEnd) => {
|
||||||
|
panic!("parse error: unexpected ProviderEnd; \
|
||||||
|
expected something else to follow BeginQuery for {:?}", q)
|
||||||
|
},
|
||||||
|
(ParseState::HaveQuery(q1,_),
|
||||||
|
ProfileQueriesMsg::QueryBegin(span2,querymsg2)) => {
|
||||||
|
panic!("parse error: unexpected QueryBegin; \
|
||||||
|
earlier query is unfinished: {:?} and now {:?}",
|
||||||
|
q1, Query{span:span2, msg:querymsg2})
|
||||||
|
},
|
||||||
|
|
||||||
|
(ParseState::HaveTimeBegin(_, _), _) => {
|
||||||
|
unreachable!()
|
||||||
|
},
|
||||||
|
(ParseState::HaveTaskBegin(_, _), _) => {
|
||||||
|
unreachable!()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
315
src/librustc_driver/profile/trace.rs
Normal file
315
src/librustc_driver/profile/trace.rs
Normal file
|
|
@ -0,0 +1,315 @@
|
||||||
|
// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
|
||||||
|
// file at the top-level directory of this distribution and at
|
||||||
|
// http://rust-lang.org/COPYRIGHT.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||||
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||||
|
// option. This file may not be copied, modified, or distributed
|
||||||
|
// except according to those terms.
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
use syntax_pos::Span;
|
||||||
|
use rustc::ty::maps::QueryMsg;
|
||||||
|
use std::fs::File;
|
||||||
|
use std::time::{Duration, Instant};
|
||||||
|
use std::collections::hash_map::HashMap;
|
||||||
|
use rustc::dep_graph::{DepNode};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||||
|
pub struct Query {
|
||||||
|
pub span: Span,
|
||||||
|
pub msg: QueryMsg,
|
||||||
|
}
|
||||||
|
pub enum Effect {
|
||||||
|
QueryBegin(Query, CacheCase),
|
||||||
|
TimeBegin(String),
|
||||||
|
TaskBegin(DepNode),
|
||||||
|
}
|
||||||
|
pub enum CacheCase {
|
||||||
|
Hit, Miss
|
||||||
|
}
|
||||||
|
/// Recursive trace structure
|
||||||
|
pub struct Rec {
|
||||||
|
pub effect: Effect,
|
||||||
|
pub start: Instant,
|
||||||
|
pub dur_self: Duration,
|
||||||
|
pub dur_total: Duration,
|
||||||
|
pub extent: Box<Vec<Rec>>,
|
||||||
|
}
|
||||||
|
pub struct QueryMetric {
|
||||||
|
pub count: usize,
|
||||||
|
pub dur_self: Duration,
|
||||||
|
pub dur_total: Duration,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn cons_of_query_msg(q: &trace::Query) -> String {
|
||||||
|
let s = format!("{:?}", q.msg);
|
||||||
|
let cons: Vec<&str> = s.split(|d| d == '(' || d == '{').collect();
|
||||||
|
assert!(cons.len() > 0 && cons[0] != "");
|
||||||
|
cons[0].to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn cons_of_key(k: &DepNode) -> String {
|
||||||
|
let s = format!("{:?}", k);
|
||||||
|
let cons: Vec<&str> = s.split(|d| d == '(' || d == '{').collect();
|
||||||
|
assert!(cons.len() > 0 && cons[0] != "");
|
||||||
|
cons[0].to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
// First return value is text; second return value is a CSS class
|
||||||
|
pub fn html_of_effect(eff: &Effect) -> (String, String) {
|
||||||
|
match *eff {
|
||||||
|
Effect::TimeBegin(ref msg) => {
|
||||||
|
(msg.clone(),
|
||||||
|
format!("time-begin"))
|
||||||
|
},
|
||||||
|
Effect::TaskBegin(ref key) => {
|
||||||
|
let cons = cons_of_key(key);
|
||||||
|
(cons.clone(), format!("{} task-begin", cons))
|
||||||
|
},
|
||||||
|
Effect::QueryBegin(ref qmsg, ref cc) => {
|
||||||
|
let cons = cons_of_query_msg(qmsg);
|
||||||
|
(cons.clone(),
|
||||||
|
format!("{} {}",
|
||||||
|
cons,
|
||||||
|
match *cc {
|
||||||
|
CacheCase::Hit => "hit",
|
||||||
|
CacheCase::Miss => "miss",
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// First return value is text; second return value is a CSS class
|
||||||
|
fn html_of_duration(_start: &Instant, dur: &Duration) -> (String, String) {
|
||||||
|
use rustc::util::common::duration_to_secs_str;
|
||||||
|
(duration_to_secs_str(dur.clone()),
|
||||||
|
"".to_string()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn html_of_fraction(frac: f64) -> (String, String) {
|
||||||
|
let css = {
|
||||||
|
if frac > 0.50 { format!("frac-50") }
|
||||||
|
else if frac > 0.40 { format!("frac-40") }
|
||||||
|
else if frac > 0.30 { format!("frac-30") }
|
||||||
|
else if frac > 0.20 { format!("frac-20") }
|
||||||
|
else if frac > 0.10 { format!("frac-10") }
|
||||||
|
else if frac > 0.05 { format!("frac-05") }
|
||||||
|
else if frac > 0.02 { format!("frac-02") }
|
||||||
|
else if frac > 0.01 { format!("frac-01") }
|
||||||
|
else if frac > 0.001 { format!("frac-001") }
|
||||||
|
else { format!("frac-0") }
|
||||||
|
};
|
||||||
|
let percent = frac * 100.0;
|
||||||
|
if percent > 0.1 { (format!("{:.1}%", percent), css) }
|
||||||
|
else { (format!("< 0.1%", ), css) }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn total_duration(traces: &Vec<Rec>) -> Duration {
|
||||||
|
let mut sum : Duration = Duration::new(0,0);
|
||||||
|
for t in traces.iter() {
|
||||||
|
sum += t.dur_total;
|
||||||
|
}
|
||||||
|
return sum
|
||||||
|
}
|
||||||
|
|
||||||
|
fn duration_div(nom: Duration, den: Duration) -> f64 {
|
||||||
|
fn to_nanos(d: Duration) -> u64 {
|
||||||
|
d.as_secs() * 1_000_000_000 + d.subsec_nanos() as u64
|
||||||
|
}
|
||||||
|
|
||||||
|
to_nanos(nom) as f64 / to_nanos(den) as f64
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_traces_rec(file: &mut File, traces: &Vec<Rec>, total: Duration, depth: usize) {
|
||||||
|
for t in traces {
|
||||||
|
let (eff_text, eff_css_classes) = html_of_effect(&t.effect);
|
||||||
|
let (dur_text, dur_css_classes) = html_of_duration(&t.start, &t.dur_total);
|
||||||
|
let fraction = duration_div(t.dur_total, total);
|
||||||
|
let percent = fraction * 100.0;
|
||||||
|
let (frc_text, frc_css_classes) = html_of_fraction(fraction);
|
||||||
|
write!(file, "<div class=\"trace depth-{} extent-{}{} {} {} {}\">\n",
|
||||||
|
depth,
|
||||||
|
t.extent.len(),
|
||||||
|
/* Heuristic for 'important' CSS class: */
|
||||||
|
if t.extent.len() > 5 || percent >= 1.0 {
|
||||||
|
" important" }
|
||||||
|
else { "" },
|
||||||
|
eff_css_classes,
|
||||||
|
dur_css_classes,
|
||||||
|
frc_css_classes,
|
||||||
|
).unwrap();
|
||||||
|
write!(file, "<div class=\"eff\">{}</div>\n", eff_text).unwrap();
|
||||||
|
write!(file, "<div class=\"dur\">{}</div>\n", dur_text).unwrap();
|
||||||
|
write!(file, "<div class=\"frc\">{}</div>\n", frc_text).unwrap();
|
||||||
|
write_traces_rec(file, &t.extent, total, depth + 1);
|
||||||
|
write!(file, "</div>\n").unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn compute_counts_rec(counts: &mut HashMap<String,QueryMetric>, traces: &Vec<Rec>) {
|
||||||
|
for t in traces.iter() {
|
||||||
|
match t.effect {
|
||||||
|
Effect::TimeBegin(ref msg) => {
|
||||||
|
let qm = match counts.get(msg) {
|
||||||
|
Some(_qm) => { panic!("TimeBegin with non-unique, repeat message") }
|
||||||
|
None => QueryMetric{
|
||||||
|
count: 1,
|
||||||
|
dur_self: t.dur_self,
|
||||||
|
dur_total: t.dur_total,
|
||||||
|
}};
|
||||||
|
counts.insert(msg.clone(), qm);
|
||||||
|
},
|
||||||
|
Effect::TaskBegin(ref key) => {
|
||||||
|
let cons = cons_of_key(key);
|
||||||
|
let qm = match counts.get(&cons) {
|
||||||
|
Some(qm) =>
|
||||||
|
QueryMetric{
|
||||||
|
count: qm.count + 1,
|
||||||
|
dur_self: qm.dur_self + t.dur_self,
|
||||||
|
dur_total: qm.dur_total + t.dur_total,
|
||||||
|
},
|
||||||
|
None => QueryMetric{
|
||||||
|
count: 1,
|
||||||
|
dur_self: t.dur_self,
|
||||||
|
dur_total: t.dur_total,
|
||||||
|
}};
|
||||||
|
counts.insert(cons, qm);
|
||||||
|
},
|
||||||
|
Effect::QueryBegin(ref qmsg, ref _cc) => {
|
||||||
|
let qcons = cons_of_query_msg(qmsg);
|
||||||
|
let qm = match counts.get(&qcons) {
|
||||||
|
Some(qm) =>
|
||||||
|
QueryMetric{
|
||||||
|
count: qm.count + 1,
|
||||||
|
dur_total: qm.dur_total + t.dur_total,
|
||||||
|
dur_self: qm.dur_self + t.dur_self
|
||||||
|
},
|
||||||
|
None => QueryMetric{
|
||||||
|
count: 1,
|
||||||
|
dur_total: t.dur_total,
|
||||||
|
dur_self: t.dur_self,
|
||||||
|
}
|
||||||
|
};
|
||||||
|
counts.insert(qcons, qm);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
compute_counts_rec(counts, &t.extent)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_counts(count_file: &mut File, counts: &mut HashMap<String,QueryMetric>) {
|
||||||
|
use rustc::util::common::duration_to_secs_str;
|
||||||
|
use std::cmp::Ordering;
|
||||||
|
|
||||||
|
let mut data = vec![];
|
||||||
|
for (ref cons, ref qm) in counts.iter() {
|
||||||
|
data.push((cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone()));
|
||||||
|
};
|
||||||
|
data.sort_by(|&(_,_,_,self1),&(_,_,_,self2)|
|
||||||
|
if self1 > self2 { Ordering::Less } else { Ordering::Greater } );
|
||||||
|
for (cons, count, dur_total, dur_self) in data {
|
||||||
|
write!(count_file, "{}, {}, {}, {}\n",
|
||||||
|
cons, count,
|
||||||
|
duration_to_secs_str(dur_total),
|
||||||
|
duration_to_secs_str(dur_self)
|
||||||
|
).unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_traces(html_file: &mut File, counts_file: &mut File, traces: &Vec<Rec>) {
|
||||||
|
let mut counts : HashMap<String,QueryMetric> = HashMap::new();
|
||||||
|
compute_counts_rec(&mut counts, traces);
|
||||||
|
write_counts(counts_file, &mut counts);
|
||||||
|
|
||||||
|
let total : Duration = total_duration(traces);
|
||||||
|
write_traces_rec(html_file, traces, total, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the static CSS stylesheet for the profile-queries HTML report.
// The class names correspond to those emitted elsewhere in this file:
// `.trace` boxes nest one per record, `.frac-*` buckets scale padding and
// font size with the node's share of total time, `.important` highlights
// hot nodes, and `.hit`/`.miss` mark query cache outcomes.
// NOTE(review): the stylesheet is a single string literal written verbatim.
pub fn write_style(html_file: &mut File) {
write!(html_file,"{}", "
body {
font-family: sans-serif;
background: black;
}
.trace {
color: black;
display: inline-block;
border-style: solid;
border-color: red;
border-width: 1px;
border-radius: 5px;
padding: 0px;
margin: 1px;
font-size: 0px;
}
.task-begin {
border-width: 1px;
color: white;
border-color: #ff8;
font-size: 0px;
}
.miss {
border-color: red;
border-width: 1px;
}
.extent-0 {
padding: 2px;
}
.time-begin {
border-width: 4px;
font-size: 12px;
color: white;
border-color: #afa;
}
.important {
border-width: 3px;
font-size: 12px;
color: white;
border-color: #f77;
}
.hit {
padding: 0px;
border-color: blue;
border-width: 3px;
}
.eff {
color: #fff;
display: inline-block;
}
.frc {
color: #7f7;
display: inline-block;
}
.dur {
display: none
}
.frac-50 {
padding: 10px;
border-width: 10px;
font-size: 32px;
}
.frac-40 {
padding: 8px;
border-width: 8px;
font-size: 24px;
}
.frac-30 {
padding: 6px;
border-width: 6px;
font-size: 18px;
}
.frac-20 {
padding: 4px;
border-width: 6px;
font-size: 16px;
}
.frac-10 {
padding: 2px;
border-width: 6px;
font-size: 14px;
}
").unwrap();
}
|
||||||
Loading…
Add table
Add a link
Reference in a new issue