diff --git a/.gitignore b/.gitignore index bddcc82b..dc1a6fca 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,4 @@ pyzx.egg-info *.ipynb_checkpoints pyzx/js/d3.v5.js *~ +.DS_Store \ No newline at end of file diff --git a/benchmark.py b/benchmark.py deleted file mode 100644 index caee59dd..00000000 --- a/benchmark.py +++ /dev/null @@ -1,139 +0,0 @@ -import sys -from pathlib import Path -import pyzx as zx -import os -import time -import multiprocessing as mp - - -class CircuitComparer: - def __init__(self, dirname, before, after): - self.fname_before = os.path.join(dirname, before) - if after: - self.fname_after = os.path.join(dirname, after) - else: - self.fname_after = "" - self.fname_tpar = "" - if before.find('before') != -1: - self.name = before[:-7] - else: - self.name = before - self.has_run = False - - def __str__(self): - return "CircuitComparer({}, {})".format(self.name, str(self.has_run)) - - def __repr__(self): - return str(self) - - def run(self): - if self.has_run: return True - #try: - if self.fname_after: - c = zx.Circuit.from_quipper_file(self.fname_after).to_basic_gates() - self.t_opt = c.tcount() - else: - self.t_opt = '-' - c = zx.Circuit.load(self.fname_before).to_basic_gates() - self.qubits = c.qubits - #except TypeError: return False - if self.fname_tpar: - c2 = zx.Circuit.load(self.fname_tpar) - self.tpar = c2.tcount() - else: self.tpar = "-" - self.gatecount = len(c.gates) - self.t_before = c.tcount() - g = c.to_graph() - t = time.time() - while True: - zx.simplify.full_reduce(g) - break - m = zx.rules.match_gadgets_phasepoly(g) - if not m: break - zx.rules.apply_gadget_phasepoly(g, m) - self.t_after = zx.tcount(g) - self.time_simpl = time.time() - t - t = time.time() - self.extracts = True - try: - c2 = zx.extract.extract_circuit(g,quiet=True) - self.time_extr = time.time() - t - except Exception: - self.extracts = False - self.time_extr = None - self.has_run = True - del c, g - return True - - def pretty(self): - if not self.has_run: - success = self.run() - else: success = True - if not success: - return self.name + " -" - s = self.name.ljust(20) + str(self.qubits).rjust(7) - s += str(self.gatecount).rjust(8) + str(self.t_before).rjust(9) + str(self.t_opt).rjust(10) - s += str(self.tpar).rjust(6) + str(self.t_after).rjust(7) - s += "{:.2f}".format(self.time_simpl).rjust(12) - time_extr = "{:.2f}".format(self.time_extr) if self.time_extr is not None else "-" - s += time_extr.rjust(14) - #s += ("y" if self.extracts else "n").rjust(7) - return s - -def runner(arg): - c, printlock = arg - s = c.pretty() - with printlock: - print(s) - sys.stdout.flush() - return s - -if __name__ == '__main__': - circ_dir = Path('circuits') - dirs = [circ_dir / 'Arithmetic_and_Toffoli', - circ_dir / 'QFT_and_Adders', - circ_dir / 'Other'] - beforefiles = [] - afterfiles = [] - tparfiles = [] - for d in dirs: - for f in os.listdir(d): - if not os.path.isfile(os.path.join(d,f)): continue - if f.find('before') != -1: - beforefiles.append((f,d)) - elif f.find('tpar') != -1: - tparfiles.append((f,d)) - elif f.find('.qc') != -1 or f.find('.tfc') != -1: - beforefiles.append((f,d)) - else: afterfiles.append((f,d)) - - circuits = [] - for f, d in beforefiles: - n = f[:-7] - for f2,d2 in afterfiles: - if d!=d2: continue - if f2.startswith(n): - c = CircuitComparer(d, f, f2) - circuits.append(c) - break - else: - c = CircuitComparer(d, f, '') - circuits.append(c) - for f2,d2 in tparfiles: - if d!=d2: continue - if f2.startswith(n): - circuits[-1].fname_tpar = os.path.join(d2,f2) - - 
nprocesses = 4 - m = mp.Manager() - printlock = m.Lock() - pool = mp.Pool(processes=nprocesses) - print("Circuit".ljust(20), "qubits", "G-count", "T-before", "T-kitchen", "T-par", " T-us", " Time-Simp", "Time-Extract") - try: - strings = pool.map(runner, [(c,printlock) for c in circuits]) - finally: - pool.terminate() - strings.sort() - print("\n\n") - print("Circuit".ljust(20), "qubits", "G-count", "T-before", "T-kitchen", "T-par", " T-us", " Time-Simp", "Time-Extract") - print("\n".join(strings)) diff --git a/benchmarking.py b/benchmarking.py new file mode 100644 index 00000000..d564292d --- /dev/null +++ b/benchmarking.py @@ -0,0 +1,410 @@ +import os +import sys +import dill # type: ignore +import numpy as np +from pyzx.circuit import Circuit +import random +from typing import Callable, Dict, List, Set, Tuple, Optional, Union +import pandas as pd +from IPython.display import display +from tqdm import tqdm # type: ignore +from time import perf_counter +import matplotlib.pyplot as plt +import pyzx as zx + +plt.style.use('seaborn-whitegrid') +pd.set_option('display.max_columns', None) +pd.set_option('display.max_rows', None) +pd.set_option('display.max_colwidth', None) + +class Benchmark: + """Class for benchmarking circuit simplification functions""" + def __init__(self, dirpath: Optional[str] = None): + # callable functions which take simplify a circuit: {func_name: func} + self.funcs: Dict[str, Callable[..., Circuit]] = dict() + # list of simlification strategies of which the simplified circuits have been directly imported + self.routines: Set[str] = set() + # unsimplified circuits: {group_name: [circuit_names]} + self.circuit_groups: Dict[str, List[str]] = dict() + # simplified circuits: {circuit_name: {func_name: [circuit, qubit_count, gate_count, 2_count, T_count, t_opt]}} + self.circuits: Dict[str, Dict[str, List[Union[Circuit, int, Optional[float]]]]] = dict() + # randomly generated circuit data {parameters: [seed, {func_name: [gate_count, 2_count, T_count]}]} + self.rand_data: Dict[str, List[Union[int, Dict[str, List[float]]]]] = dict() + + if dirpath: # load from saved files + if not os.path.isdir(dirpath): raise Exception(f'{dirpath} is not a directory.') + try: + with open(dirpath+'/funcs.pkl', 'rb') as f: + self.funcs = dill.load(f) + with open(dirpath+'/circuit_groups.pkl', 'rb') as f: + self.circuit_groups = dill.load(f) + with open(dirpath+'/circuits.pkl', 'rb') as f: + self.circuits = dill.load(f) + with open(dirpath+'/rand_data.pkl', 'rb') as f: + self.rand_data = dill.load(f) + with open(dirpath+'/routines.pkl', 'rb') as f: + self.routines = dill.load(f) + except: raise Exception(f'{dirpath} does not contain the correct datafiles') + + def save(self, dirpath: str) -> None: + """Saves the benchmark data to dirpath""" + if not os.path.isdir(dirpath): raise Exception(f'{dirpath} is not a directory.') + with open(dirpath+'/funcs.pkl', 'wb') as f: + dill.dump(self.funcs,f) + with open(dirpath+'/circuit_groups.pkl', 'wb') as f: + dill.dump(self.circuit_groups,f) + with open(dirpath+'/circuits.pkl', 'wb') as f: + dill.dump(self.circuits,f) + with open(dirpath+'/rand_data.pkl', 'wb') as f: + dill.dump(self.rand_data,f) + with open(dirpath+'/routines.pkl', 'wb') as f: + dill.dump(self.routines,f) + + def show_attributes(self): + """Displays which functions/circuit groups have been loaded, and a table for which have been run""" + atts = ['Qubits','Gates','2Q Count','T Count','t_opt'] + print(f'Circuit attributes: {atts}') + if len(self.funcs) == 0: print('No loaded functions') + 
else: print(f'Loaded functions: {list(self.funcs.keys())}') + if len(self.circuit_groups.keys()) == 0: print('No loaded routines') + else: print(f'Loaded routines: {list(self.routines)}') + if len(self.circuits) == 0: + print('No circuits added') + return + if len(self.circuit_groups) == 0: print('No loaded unsimplified circuit groups') + else: + print(f'Loaded circuit groups: {list(self.circuit_groups.keys())}') + groups = list(self.circuit_groups.keys()) + strats = ['Original'] + sorted(list(self.funcs.keys())+list(self.routines)) + df = pd.DataFrame(index = groups, columns = strats) + for g in groups: + for s in strats: + run = '-' + for c in self.circuit_groups[g]: + if s in self.circuits[c].keys(): + run = 'Y' + break + df.at[g, s] = run + display(df) + + def load_circuits(self, dirname: str, group_name: Optional[str] = None, simp_strategy: str ='Original', extension: Optional[str] = None) -> None: + """Loads circuits from a directory, for either the original circuits or pre-simplified versions + + Args: + dirname (str): directory in which circuits are located + group_name (str, optional): the name for the group of circuits being loaded. Defaults to dirname. + simp_strategy (str, optional): if circuits have been pre-simplified the name of the simplification strategy. Defaults to 'Original'. + extension (str, optional): extension of circuits in directory. Defaults to None. + """ + if not group_name: group_name = str(dirname) + if simp_strategy == 'Original': self.circuit_groups[group_name] = [] + else: self.routines.add(simp_strategy) + + for c in [f for f in os.listdir(dirname) if not f.startswith('.')]: + if not os.path.isfile(os.path.join(dirname,c)): continue + if not extension or c.find(extension) != -1: + try: + circ = Circuit.load(os.path.join(dirname, c)).to_basic_gates() + except: + print(f'Circuit {c} failed to load') + continue + circ_name = os.path.splitext(c)[0] + if simp_strategy == 'Original': + self.circuit_groups[group_name].append(circ_name) + if circ_name not in self.circuits.keys(): self.circuits[circ_name] = dict() + self.circuits[circ_name][simp_strategy] = [circ, circ.qubits, len(circ.gates), circ.twoqubitcount(), circ.tcount(), None] + + def add_simplification_func(self, func: Callable[..., Circuit], name: str, groups_to_run: Optional[List[str]] = ['all'], verify=False, rerun = False) -> None: + """Loads a simplification function + + Args: + func (Callable[Circuit]): callable function should take a Circuit as an input and output either a Circuit or a tuple (Circuit, t_simp, t_opt) + name (str): name for the function + groups_to_run (List[str], optional): groups of circuits to immediately run the function on. Defaults to 'all'. 
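As a usage sketch of this method (the function name `full_reduce_circuit`, the group name 'Fast', and the import path are illustrative; the pyzx calls mirror those used in the removed benchmark.py and elsewhere in this patch), a simplification function might be registered like so:

import pyzx as zx
from benchmarking import Benchmark

def full_reduce_circuit(c):
    # Simplify the ZX-diagram, then extract a circuit, as the old benchmark.py did.
    g = c.to_graph()
    zx.simplify.full_reduce(g)
    return zx.extract.extract_circuit(g).to_basic_gates()

b = Benchmark()
b.load_circuits('circuits/benchmarking_circuits/Fast/before', group_name='Fast')
# Registers the function and immediately runs (and verifies) it on the 'Fast' group.
b.add_simplification_func(full_reduce_circuit, 'full_reduce',
                          groups_to_run=['Fast'], verify=True)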
+ """ + self.funcs[name] = func + if groups_to_run: self.run(funcs_to_run = [name], groups_to_run=groups_to_run, verify=verify, rerun=rerun) + + def del_simplification_funcs(self, funcs: List[str]) -> None: + """Deletes simplification functions + + Args: + funcs (List[str]): list of simplification function names + """ + for func_name in funcs: + if func_name in self.funcs.keys(): del self.funcs[func_name] + for circuit_name, value in self.circuits.items(): + if func_name in value.keys(): del self.circuits[circuit_name][func_name] + for parameters, value2 in self.rand_data.items(): + if func_name in value2[1].keys(): # type: ignore + del self.rand_data[parameters][1][func_name] # type: ignore + + def run(self, funcs_to_run: List[str] = ['all'], groups_to_run: List[str] = ['all'], verify: bool = False, rerun: bool = False) -> None: + """Runs a series of functions on a series of groups of circuits + + Args: + funcs_to_run (List[str], optional): names of loaded functions to run. Defaults to 'all'. + groups_to_run (List[str], optional): names of loaded groups of circuits to run. Defaults to 'all'. + rerun (bool, optional): rerun circuit even if function has already been run on it. Defaults to False. + """ + if funcs_to_run == ['all']: funcs_to_run = list(self.funcs.keys()) + if groups_to_run == ['all']: groups_to_run = list(self.circuit_groups.keys()) + for group_name in groups_to_run: + if group_name not in self.circuit_groups.keys(): + print(f'The group of circuits {group_name} has not been added. Call benchmark.show_attributes() to see loaded group names.') + continue + + groups = [g for gn, g in self.circuit_groups.items() if gn in groups_to_run] + circuits = [c for g in groups for c in g] + pbar = tqdm(circuits) + for circ_name in pbar: + for func_name in funcs_to_run: + if func_name not in self.funcs.keys(): + print(f'The function {func_name} has not been added. 
Call benchmark.show_attributes() to see loaded functions.') + continue + if func_name in self.circuits[circ_name].keys() and not rerun: continue + pbar.set_description("{:<70}".format(f'Processing {func_name} on {circ_name}')) + t0 = perf_counter() + opt_circ = self.funcs[func_name](self.circuits[circ_name]['Original'][0]) + t_opt = round(perf_counter() - t0,2) + if verify: + c_id = self.circuits[circ_name]['Original'][0].adjoint() # type: ignore + c_id.add_circuit(opt_circ) + g = c_id.to_graph() + zx.simplify.full_reduce(g) + if g.num_vertices() != 2*len(g.inputs()): + print(f'Circuit resulting from {func_name} on {circ_name} not verified',flush=True) + continue + self.circuits[circ_name][func_name] = [opt_circ, opt_circ.qubits, len(opt_circ.gates), opt_circ.twoqubitcount(), opt_circ.tcount(), t_opt] + + @staticmethod + def generate_cliffordT_circuit(qubits: int, depth: int, p_cnot: float, p_t: float) -> Circuit: + """Generates a random clifford + T circuit + + Args: + qubits (int): number of qubits + depth (int): depth of circuit + p_cnot (float): probabilitiy of a CNOT gate + p_t (float): probability of a T gate + + Returns: + Circuit: Random circuit + """ + p_s = 0.5*(1.0-p_cnot-p_t) + p_had = 0.5*(1.0-p_cnot-p_t) + c = Circuit(qubits) + for _ in range(depth): + r = random.random() + if r > 1-p_had: c.add_gate("HAD",random.randrange(qubits)) + elif r > 1-p_had-p_s: c.add_gate("S",random.randrange(qubits)) + elif r > 1-p_had-p_s-p_t: c.add_gate("T",random.randrange(qubits)) + else: + tgt = random.randrange(qubits) + while True: + ctrl = random.randrange(qubits) + if ctrl!=tgt: break + c.add_gate("CNOT",tgt,ctrl) + return c + + def generate_data(self, qubits: int, depth: int, cnot_prob: float, t_prob: float, funcs_to_run: List[str] = ['all'], reps: int = 50, overwrite: bool = False, random_seed: Optional[int] = None, pbar: tqdm = None) -> None: + """Runs a series of functions on randomly generated Clifford + T circuits and stores the average result for each function + + Args: + qubits (int): number of qubits + depth (int): depth of circuit + cnot_prob (float): probaility of a CNOT gate + t_prob (float): probability of a T gate + funcs_to_run (List[str], optional): names of loaded functions to run. Defaults to 'all'. + reps (int, optional): number of repeats for each parameter set. Defaults to 50. + overwrite (bool, optional): overwrite current data if it exists. Defaults to False. + random_seed (int, optional): random.random seed. Defaults to None. + pbar (tqdm, optional): tqdm progress bar. Defaults to None. 
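Continuing that sketch, generate_data stores the averaged [gate count, 2-qubit count, T count] per strategy under a key built from the parameters; for example (reusing `b` and the registered 'full_reduce' from above):

# Average metrics over 20 random 8-qubit Clifford+T circuits of depth 500.
b.generate_data(qubits=8, depth=500, cnot_prob=0.3, t_prob=0.1,
                funcs_to_run=['full_reduce'], reps=20, random_seed=1)
seed, results = b.rand_data['8_500_0.3_0.1_20']
print(results['Original'])      # [avg gates, avg 2-qubit gates, avg T gates]
print(results['full_reduce'])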
+ """ + params = f'{qubits}_{depth}_{cnot_prob}_{t_prob}_{reps}' + if params in self.rand_data.keys() and not overwrite: + seed = self.rand_data[params][0] + if random_seed and random_seed != seed: + seed = random_seed + self.rand_data[params] = [seed, dict()] + run = [] + else: + run = list(self.rand_data[params][1].keys()) # type: ignore + else: + if random_seed: seed = random_seed + else: seed = random.randrange(sys.maxsize) + self.rand_data[params] = [seed, dict()] + run = [] + + random.seed(seed) # type: ignore + circuits = [self.generate_cliffordT_circuit(qubits, depth, cnot_prob, t_prob) for _ in range(reps)] + + if 'Original' not in run: + count = [0, 0, 0] + for c in circuits: + count[0] += len(c.gates) + count[1] += c.twoqubitcount() + count[2] += c.tcount() + count = [x/reps for x in count] # type: ignore + self.rand_data[params][1]['Original'] = count # type: ignore + + if funcs_to_run == ['all']: funcs_to_run = self.funcs.keys() # type: ignore + for func_name in funcs_to_run: + if func_name not in self.funcs.keys(): + print(f'The function {func_name} has not been added. Call benchmark.show_attributes() to see loaded functions.') + continue + if func_name in run and not overwrite: continue + + if pbar: pbar.set_description("{:<50}".format(f'Processing {func_name} on P_t = {t_prob}')) + count = [0, 0, 0] + for c in circuits: + c = self.generate_cliffordT_circuit(qubits, depth, cnot_prob, t_prob) + res = self.funcs[func_name](c) + if not isinstance(res, tuple): c2 = res + else: c2 = res[0] + count[0] += len(c2.gates) + count[1] += c2.twoqubitcount() + count[2] += c2.tcount() + count = [x/reps for x in count] # type: ignore + self.rand_data[params][1][func_name] = count # type: ignore + + def Pt_graphs(self, funcs: List[str], qubits: int, depth: int, cnot_prob: float, t_probs: List[float], ys: List[str] = ['Gates','2Q Count','T Count'], reps: int = 50, overwrite: bool = False, figsize: List[int] = [7,5], random_seed: Optional[int] = None) -> plt.figure: + """Produces a series of graphs for circuit simplification metrics for random circuits with a range of T gate probabilites + + Args: + funcs (List[str]): names of loaded functions to display on graphs + qubits (int): number of qubits + depth (int): depth of circuits + cnot_prob (float): probability of CNOT gates + t_probs (List[float]): range of T gate probabilites + ys (List[str], optional): which metrics to print out. Defaults to ['Gates','2Q Count','T Count']. Options limited to subsets of this set. + reps (int, optional): number of repeats for each parameter set. Defaults to 50. + overwrite (bool, optional): overwrite current data if it exists. Defaults to False. + figsize (List[int, int], optional): figure size for each plot. Defaults to [7,5]. + random_seed (int, optional): random.random seed. Defaults to None. + + Returns: + plt.figure: matplotlib.pyplot figure with a subplot for each metric in ys + """ + for func_name in funcs[:]: + if func_name not in self.funcs.keys(): + print(f'The function {func_name} has not been added. 
Call benchmark.show_attributes() to see loaded functions.') + funcs.remove(func_name) + + g_count = np.empty((len(funcs)+1, 0)).tolist() + two_count = np.empty((len(funcs)+1, 0)).tolist() + t_count = np.empty((len(funcs)+1, 0)).tolist() + + pbar = tqdm(t_probs) + for t_prob in pbar: + self.generate_data(qubits, depth, cnot_prob, t_prob, funcs_to_run=funcs, reps=reps, overwrite=overwrite, random_seed=random_seed, pbar=pbar) + params = f'{qubits}_{depth}_{cnot_prob}_{t_prob}_{reps}' + for i,func_name in enumerate(['Original']+funcs): + count = self.rand_data[params][1][func_name] # type: ignore + g_count[i].append(count[0]) + two_count[i].append(count[1]) + t_count[i].append(count[2]) + + stats = [y for y in ys if y in ['Gates','2Q Count','T Count']] + fs = figsize[:] + fs[0] *= len(stats) + fig = plt.figure(figsize=fs) + + if 'Gates' in stats: + ax1 = fig.add_subplot(1,len(stats),stats.index('Gates')+1) + ax1.set_ylabel("Total Gate Count") + ax1.set_xlabel("$P_t$") + ax1.grid(color='#EEEEEE') + if '2Q Count' in stats: + ax2 = fig.add_subplot(1, len(stats),stats.index('2Q Count')+1) + ax2.set_ylabel("2-Qubit Gate Count") + ax2.set_xlabel("$P_t$") + ax2.grid(color='#EEEEEE') + if 'T Count' in stats: + ax3 = fig.add_subplot(1, len(stats),stats.index('T Count')+1) + ax3.set_ylabel("T Gate Count") + ax3.set_xlabel("$P_t$") + ax3.grid(color='#EEEEEE') + + for i, func_name in enumerate(['Originial']+funcs): + if 'Gates' in stats: ax1.plot(t_probs, g_count[i], marker="o" ,markersize=3, linestyle=':', label=func_name) + if '2Q Count' in stats: ax2.plot(t_probs, two_count[i], marker="o" ,markersize=3, linestyle=':', label=func_name) + if 'T Count' in stats: ax3.plot(t_probs, t_count[i], marker="o" ,markersize=3, linestyle=':', label=func_name) + + plt.legend(bbox_to_anchor=(0.11, -0.19), loc="lower left", + bbox_transform=fig.transFigure, ncol=3*len(stats), fancybox=True) + return fig + + @staticmethod + def table_style(styler: plt.style , cols: List[Tuple[str]]) -> plt.style: + styler.set_properties(**{'width':'8ch', 'text-align':'center'}).set_table_styles([dict(selector="th.col_heading.level1",props=[('text-align', 'center')])]) + styler.set_table_styles([{'selector': 'tr:hover', 'props': [('background-color', '#e1f7d5'),('color', 'black'),('font-weight', 'bold')]}, #cell_hover + {'selector': '.index_name', 'props': 'background-color: #232b2b; font-style: italic; color: white; font-weight:bold;'}, #index_names + {'selector': 'th:not(.index_name)', 'props': 'background-color: #232b2b; color: white;'}, #headers + {'selector': 'th.col_heading.level0', 'props': 'text-align: center; font-size: 1.2em; padding: 0.7em'}, + {'selector': 'td', 'props': 'text-align: center; font-size: 1em'}], overwrite=False) + if any('Gates' in col for col in cols): border_col = 'Gates' + elif any('2Q Count' in col for col in cols): border_col = '2Q Count' + elif any('T Count' in col for col in cols): border_col = 'T Count' + elif any('t_opt' in col for col in cols): border_col = 't_opt' + styler.set_table_styles(dict.fromkeys([col for col in cols if border_col in col or ('Original' in col and 'Qubits' in col)], [{'selector': 'th', 'props': 'border-left: 1px solid white !important'}, + {'selector': 'td', 'props': 'border-left: 1px solid white !important'}]), overwrite=False, axis=0) + styler.apply(lambda s: np.where(s==np.nanmin(s.values),'color:green',''), axis=1, subset=[col for col in cols if 'Gates' in col]) + styler.apply(lambda s: np.where(s==np.nanmin(s.values),'color:green',''), axis=1, subset=[col for col in cols 
if '2Q Count' in col]) + styler.apply(lambda s: np.where(s==np.nanmin(s.values),'color:green',''), axis=1, subset=[col for col in cols if 'T Count' in col]) + styler.format(subset=[col for col in cols if 't_opt' in col],precision=2, na_rep='-', thousands=",") + styler.format(subset=[col for col in cols if 't_opt' not in col],precision=0, na_rep='-', thousands=",") + return(styler) + + def df(self, groups: List[str] = ['all'], routines: List[str] = ['all'], funcs: List[str] = ['all'], atts: List[str] = ['all']) -> pd.DataFrame: + """Produces a pandas dataframe of metrics over benchmark circuits. + + Args: + groups (List[str], optional): group names for circuits for index. Defaults to 'all'. + routines (List[str], optional): names for routines for columns. Defaults to 'all'. + funcs (List[str], optional): names for functions for columns. Defaults to 'all'. + atts (List[str], optional): names for attributes to show for each function/routine. Defaults to 'all'. + + Returns: pd.DataFrame + """ + if groups==['all']: groups=list(self.circuit_groups.keys()) + if routines==['all']: routines=sorted(list(self.routines)) + else: + for r in routines[:]: + if r not in self.routines: + print(f'The routine {r} has not been added. Call .show_attributes() to see loaded routines.') + routines.remove(r) + if funcs==['all']: funcs=sorted(list(self.funcs)) + else: + for f in funcs[:]: + if f not in self.funcs.keys(): + print(f'The function {f} has not been added. Call .show_attributes() to see loaded functions.') + funcs.remove(f) + + all_atts = ['Qubits','Gates','2Q Count','T Count','t_opt','na'] + if atts == ['all']: atts = all_atts + match_atts = [True if att in atts else False for att in all_atts] + + heads = ['Original'] + routines + funcs + circs = [] + data = [] + for g in groups: + if g not in self.circuit_groups.keys(): + print(f'The group of circuits {g} has not been added. 
Call .show_attributes() to see loaded groups.') + continue + for c in self.circuit_groups[g]: + c_data = [] + for f in heads: + try: d = self.circuits[c][f][1:] + except: d = [np.nan]*6 + c_data.extend([x for i,x in enumerate(d) if match_atts[i]]) + circs.append(c) + data.append(c_data) + + df = pd.DataFrame(data=data, columns=pd.MultiIndex.from_product([heads,atts])) + df['Circuits'] = circs + df = df.set_index('Circuits') + df = df.sort_index() + df=df[[col for col in df.columns if 'Qubits' not in col or 'Original' in col]] + df=df.dropna(axis=1, how='all') + display(df.style.pipe(self.table_style, cols=df.columns)) + return df \ No newline at end of file diff --git a/circuits/Fast/QFT16_before b/circuits/Fast/QFT16_before deleted file mode 100644 index 0aba6fcd..00000000 --- a/circuits/Fast/QFT16_before +++ /dev/null @@ -1,590 +0,0 @@ -Inputs: 0:Qbit, 1:Qbit, 2:Qbit, 3:Qbit, 4:Qbit, 5:Qbit, 6:Qbit, 7:Qbit, 8:Qbit, 9:Qbit, 10:Qbit, 11:Qbit, 12:Qbit, 13:Qbit, 14:Qbit, 15:Qbit -Comment["ENTER: qft_big_endian"](0:"qs[0]", 1:"qs[1]", 2:"qs[2]", 3:"qs[3]", 4:"qs[4]", 5:"qs[5]", 6:"qs[6]", 7:"qs[7]", 8:"qs[8]", 9:"qs[9]", 10:"qs[10]", 11:"qs[11]", 12:"qs[12]", 13:"qs[13]", 14:"qs[14]", 15:"qs[15]") -QGate["H"](0) with nocontrol -QGate["not"](0) with controls=[+1] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](0) -QGate["not"](0) with controls=[+1] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](0) -QRot["exp(-i%Z)",3.9269908169872414e-1](1) -QGate["H"](1) with nocontrol -QGate["not"](0) with controls=[+2] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](0) -QGate["not"](0) with controls=[+2] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](0) -QRot["exp(-i%Z)",1.9634954084936207e-1](2) -QGate["not"](1) with controls=[+2] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](1) -QGate["not"](1) with controls=[+2] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](1) -QRot["exp(-i%Z)",3.9269908169872414e-1](2) -QGate["H"](2) with nocontrol -QGate["not"](0) with controls=[+3] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](0) -QGate["not"](0) with controls=[+3] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](0) -QRot["exp(-i%Z)",9.8174770424681035e-2](3) -QGate["not"](1) with controls=[+3] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](1) -QGate["not"](1) with controls=[+3] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](1) -QRot["exp(-i%Z)",1.9634954084936207e-1](3) -QGate["not"](2) with controls=[+3] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](2) -QGate["not"](2) with controls=[+3] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](2) -QRot["exp(-i%Z)",3.9269908169872414e-1](3) -QGate["H"](3) with nocontrol -QGate["not"](0) with controls=[+4] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](0) -QGate["not"](0) with controls=[+4] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](0) -QRot["exp(-i%Z)",4.9087385212340517e-2](4) -QGate["not"](1) with controls=[+4] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](1) -QGate["not"](1) with controls=[+4] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](1) -QRot["exp(-i%Z)",9.8174770424681035e-2](4) -QGate["not"](2) with controls=[+4] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](2) -QGate["not"](2) with controls=[+4] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](2) -QRot["exp(-i%Z)",1.9634954084936207e-1](4) -QGate["not"](3) with controls=[+4] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](3) -QGate["not"](3) 
with controls=[+4] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](3) -QRot["exp(-i%Z)",3.9269908169872414e-1](4) -QGate["H"](4) with nocontrol -QGate["not"](0) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](0) -QGate["not"](0) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](0) -QRot["exp(-i%Z)",2.4543692606170259e-2](5) -QGate["not"](1) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](1) -QGate["not"](1) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](1) -QRot["exp(-i%Z)",4.9087385212340517e-2](5) -QGate["not"](2) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](2) -QGate["not"](2) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](2) -QRot["exp(-i%Z)",9.8174770424681035e-2](5) -QGate["not"](3) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](3) -QGate["not"](3) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](3) -QRot["exp(-i%Z)",1.9634954084936207e-1](5) -QGate["not"](4) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](4) -QGate["not"](4) with controls=[+5] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](4) -QRot["exp(-i%Z)",3.9269908169872414e-1](5) -QGate["H"](5) with nocontrol -QGate["not"](0) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](0) -QGate["not"](0) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](0) -QRot["exp(-i%Z)",1.2271846303085129e-2](6) -QGate["not"](1) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](1) -QGate["not"](1) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](1) -QRot["exp(-i%Z)",2.4543692606170259e-2](6) -QGate["not"](2) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](2) -QGate["not"](2) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](2) -QRot["exp(-i%Z)",4.9087385212340517e-2](6) -QGate["not"](3) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](3) -QGate["not"](3) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](3) -QRot["exp(-i%Z)",9.8174770424681035e-2](6) -QGate["not"](4) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](4) -QGate["not"](4) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](4) -QRot["exp(-i%Z)",1.9634954084936207e-1](6) -QGate["not"](5) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](5) -QGate["not"](5) with controls=[+6] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](5) -QRot["exp(-i%Z)",3.9269908169872414e-1](6) -QGate["H"](6) with nocontrol -QGate["not"](0) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](0) -QGate["not"](0) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](0) -QRot["exp(-i%Z)",6.1359231515425647e-3](7) -QGate["not"](1) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](1) -QGate["not"](1) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](1) -QRot["exp(-i%Z)",1.2271846303085129e-2](7) -QGate["not"](2) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](2) -QGate["not"](2) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](2) -QRot["exp(-i%Z)",2.4543692606170259e-2](7) -QGate["not"](3) with controls=[+7] with nocontrol 
-QRot["exp(-i%Z)",-4.908738521234052e-2](3) -QGate["not"](3) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](3) -QRot["exp(-i%Z)",4.9087385212340517e-2](7) -QGate["not"](4) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](4) -QGate["not"](4) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](4) -QRot["exp(-i%Z)",9.8174770424681035e-2](7) -QGate["not"](5) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](5) -QGate["not"](5) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](5) -QRot["exp(-i%Z)",1.9634954084936207e-1](7) -QGate["not"](6) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](6) -QGate["not"](6) with controls=[+7] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](6) -QRot["exp(-i%Z)",3.9269908169872414e-1](7) -QGate["H"](7) with nocontrol -QGate["not"](0) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](0) -QGate["not"](0) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](0) -QRot["exp(-i%Z)",3.0679615757712823e-3](8) -QGate["not"](1) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](1) -QGate["not"](1) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](1) -QRot["exp(-i%Z)",6.1359231515425647e-3](8) -QGate["not"](2) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](2) -QGate["not"](2) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](2) -QRot["exp(-i%Z)",1.2271846303085129e-2](8) -QGate["not"](3) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](3) -QGate["not"](3) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](3) -QRot["exp(-i%Z)",2.4543692606170259e-2](8) -QGate["not"](4) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](4) -QGate["not"](4) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](4) -QRot["exp(-i%Z)",4.9087385212340517e-2](8) -QGate["not"](5) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](5) -QGate["not"](5) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](5) -QRot["exp(-i%Z)",9.8174770424681035e-2](8) -QGate["not"](6) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](6) -QGate["not"](6) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](6) -QRot["exp(-i%Z)",1.9634954084936207e-1](8) -QGate["not"](7) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](7) -QGate["not"](7) with controls=[+8] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](7) -QRot["exp(-i%Z)",3.9269908169872414e-1](8) -QGate["H"](8) with nocontrol -QGate["not"](0) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-1.533980787885641e-3](0) -QGate["not"](0) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",1.5339807878856412e-3](0) -QRot["exp(-i%Z)",1.5339807878856412e-3](9) -QGate["not"](1) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](1) -QGate["not"](1) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](1) -QRot["exp(-i%Z)",3.0679615757712823e-3](9) -QGate["not"](2) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](2) -QGate["not"](2) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](2) -QRot["exp(-i%Z)",6.1359231515425647e-3](9) -QGate["not"](3) with controls=[+9] 
with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](3) -QGate["not"](3) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](3) -QRot["exp(-i%Z)",1.2271846303085129e-2](9) -QGate["not"](4) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](4) -QGate["not"](4) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](4) -QRot["exp(-i%Z)",2.4543692606170259e-2](9) -QGate["not"](5) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](5) -QGate["not"](5) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](5) -QRot["exp(-i%Z)",4.9087385212340517e-2](9) -QGate["not"](6) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](6) -QGate["not"](6) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](6) -QRot["exp(-i%Z)",9.8174770424681035e-2](9) -QGate["not"](7) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](7) -QGate["not"](7) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](7) -QRot["exp(-i%Z)",1.9634954084936207e-1](9) -QGate["not"](8) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](8) -QGate["not"](8) with controls=[+9] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](8) -QRot["exp(-i%Z)",3.9269908169872414e-1](9) -QGate["H"](9) with nocontrol -QGate["not"](0) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-7.669903939428206e-4](0) -QGate["not"](0) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",7.6699039394282058e-4](0) -QRot["exp(-i%Z)",7.6699039394282058e-4](10) -QGate["not"](1) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-1.533980787885641e-3](1) -QGate["not"](1) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",1.5339807878856412e-3](1) -QRot["exp(-i%Z)",1.5339807878856412e-3](10) -QGate["not"](2) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](2) -QGate["not"](2) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](2) -QRot["exp(-i%Z)",3.0679615757712823e-3](10) -QGate["not"](3) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](3) -QGate["not"](3) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](3) -QRot["exp(-i%Z)",6.1359231515425647e-3](10) -QGate["not"](4) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](4) -QGate["not"](4) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](4) -QRot["exp(-i%Z)",1.2271846303085129e-2](10) -QGate["not"](5) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](5) -QGate["not"](5) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](5) -QRot["exp(-i%Z)",2.4543692606170259e-2](10) -QGate["not"](6) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](6) -QGate["not"](6) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](6) -QRot["exp(-i%Z)",4.9087385212340517e-2](10) -QGate["not"](7) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](7) -QGate["not"](7) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](7) -QRot["exp(-i%Z)",9.8174770424681035e-2](10) -QGate["not"](8) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](8) -QGate["not"](8) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](8) -QRot["exp(-i%Z)",1.9634954084936207e-1](10) -QGate["not"](9) with 
controls=[+10] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](9) -QGate["not"](9) with controls=[+10] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](9) -QRot["exp(-i%Z)",3.9269908169872414e-1](10) -QGate["H"](10) with nocontrol -QGate["not"](0) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-3.834951969714103e-4](0) -QGate["not"](0) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",3.8349519697141029e-4](0) -QRot["exp(-i%Z)",3.8349519697141029e-4](11) -QGate["not"](1) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-7.669903939428206e-4](1) -QGate["not"](1) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",7.6699039394282058e-4](1) -QRot["exp(-i%Z)",7.6699039394282058e-4](11) -QGate["not"](2) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-1.533980787885641e-3](2) -QGate["not"](2) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",1.5339807878856412e-3](2) -QRot["exp(-i%Z)",1.5339807878856412e-3](11) -QGate["not"](3) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](3) -QGate["not"](3) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](3) -QRot["exp(-i%Z)",3.0679615757712823e-3](11) -QGate["not"](4) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](4) -QGate["not"](4) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](4) -QRot["exp(-i%Z)",6.1359231515425647e-3](11) -QGate["not"](5) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](5) -QGate["not"](5) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](5) -QRot["exp(-i%Z)",1.2271846303085129e-2](11) -QGate["not"](6) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](6) -QGate["not"](6) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](6) -QRot["exp(-i%Z)",2.4543692606170259e-2](11) -QGate["not"](7) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](7) -QGate["not"](7) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](7) -QRot["exp(-i%Z)",4.9087385212340517e-2](11) -QGate["not"](8) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](8) -QGate["not"](8) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](8) -QRot["exp(-i%Z)",9.8174770424681035e-2](11) -QGate["not"](9) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](9) -QGate["not"](9) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](9) -QRot["exp(-i%Z)",1.9634954084936207e-1](11) -QGate["not"](10) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](10) -QGate["not"](10) with controls=[+11] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](10) -QRot["exp(-i%Z)",3.9269908169872414e-1](11) -QGate["H"](11) with nocontrol -QGate["not"](0) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-1.917475984857051e-4](0) -QGate["not"](0) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",1.9174759848570515e-4](0) -QRot["exp(-i%Z)",1.9174759848570515e-4](12) -QGate["not"](1) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-3.834951969714103e-4](1) -QGate["not"](1) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",3.8349519697141029e-4](1) -QRot["exp(-i%Z)",3.8349519697141029e-4](12) -QGate["not"](2) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-7.669903939428206e-4](2) -QGate["not"](2) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",7.6699039394282058e-4](2) 
-QRot["exp(-i%Z)",7.6699039394282058e-4](12) -QGate["not"](3) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-1.533980787885641e-3](3) -QGate["not"](3) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",1.5339807878856412e-3](3) -QRot["exp(-i%Z)",1.5339807878856412e-3](12) -QGate["not"](4) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](4) -QGate["not"](4) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](4) -QRot["exp(-i%Z)",3.0679615757712823e-3](12) -QGate["not"](5) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](5) -QGate["not"](5) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](5) -QRot["exp(-i%Z)",6.1359231515425647e-3](12) -QGate["not"](6) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](6) -QGate["not"](6) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](6) -QRot["exp(-i%Z)",1.2271846303085129e-2](12) -QGate["not"](7) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](7) -QGate["not"](7) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](7) -QRot["exp(-i%Z)",2.4543692606170259e-2](12) -QGate["not"](8) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](8) -QGate["not"](8) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](8) -QRot["exp(-i%Z)",4.9087385212340517e-2](12) -QGate["not"](9) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](9) -QGate["not"](9) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](9) -QRot["exp(-i%Z)",9.8174770424681035e-2](12) -QGate["not"](10) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](10) -QGate["not"](10) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](10) -QRot["exp(-i%Z)",1.9634954084936207e-1](12) -QGate["not"](11) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](11) -QGate["not"](11) with controls=[+12] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](11) -QRot["exp(-i%Z)",3.9269908169872414e-1](12) -QGate["H"](12) with nocontrol -QGate["not"](1) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-1.917475984857051e-4](1) -QGate["not"](1) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",1.9174759848570515e-4](1) -QRot["exp(-i%Z)",1.9174759848570515e-4](13) -QGate["not"](2) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-3.834951969714103e-4](2) -QGate["not"](2) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",3.8349519697141029e-4](2) -QRot["exp(-i%Z)",3.8349519697141029e-4](13) -QGate["not"](3) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-7.669903939428206e-4](3) -QGate["not"](3) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",7.6699039394282058e-4](3) -QRot["exp(-i%Z)",7.6699039394282058e-4](13) -QGate["not"](4) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-1.533980787885641e-3](4) -QGate["not"](4) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",1.5339807878856412e-3](4) -QRot["exp(-i%Z)",1.5339807878856412e-3](13) -QGate["not"](5) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](5) -QGate["not"](5) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](5) -QRot["exp(-i%Z)",3.0679615757712823e-3](13) -QGate["not"](6) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](6) -QGate["not"](6) with controls=[+13] with nocontrol 
-QRot["exp(-i%Z)",6.1359231515425647e-3](6) -QRot["exp(-i%Z)",6.1359231515425647e-3](13) -QGate["not"](7) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](7) -QGate["not"](7) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](7) -QRot["exp(-i%Z)",1.2271846303085129e-2](13) -QGate["not"](8) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](8) -QGate["not"](8) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](8) -QRot["exp(-i%Z)",2.4543692606170259e-2](13) -QGate["not"](9) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](9) -QGate["not"](9) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](9) -QRot["exp(-i%Z)",4.9087385212340517e-2](13) -QGate["not"](10) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](10) -QGate["not"](10) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](10) -QRot["exp(-i%Z)",9.8174770424681035e-2](13) -QGate["not"](11) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](11) -QGate["not"](11) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](11) -QRot["exp(-i%Z)",1.9634954084936207e-1](13) -QGate["not"](12) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](12) -QGate["not"](12) with controls=[+13] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](12) -QRot["exp(-i%Z)",3.9269908169872414e-1](13) -QGate["H"](13) with nocontrol -QGate["not"](2) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-1.917475984857051e-4](2) -QGate["not"](2) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",1.9174759848570515e-4](2) -QRot["exp(-i%Z)",1.9174759848570515e-4](14) -QGate["not"](3) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-3.834951969714103e-4](3) -QGate["not"](3) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",3.8349519697141029e-4](3) -QRot["exp(-i%Z)",3.8349519697141029e-4](14) -QGate["not"](4) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-7.669903939428206e-4](4) -QGate["not"](4) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",7.6699039394282058e-4](4) -QRot["exp(-i%Z)",7.6699039394282058e-4](14) -QGate["not"](5) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-1.533980787885641e-3](5) -QGate["not"](5) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",1.5339807878856412e-3](5) -QRot["exp(-i%Z)",1.5339807878856412e-3](14) -QGate["not"](6) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](6) -QGate["not"](6) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](6) -QRot["exp(-i%Z)",3.0679615757712823e-3](14) -QGate["not"](7) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](7) -QGate["not"](7) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](7) -QRot["exp(-i%Z)",6.1359231515425647e-3](14) -QGate["not"](8) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](8) -QGate["not"](8) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](8) -QRot["exp(-i%Z)",1.2271846303085129e-2](14) -QGate["not"](9) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](9) -QGate["not"](9) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](9) -QRot["exp(-i%Z)",2.4543692606170259e-2](14) -QGate["not"](10) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](10) 
-QGate["not"](10) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](10) -QRot["exp(-i%Z)",4.9087385212340517e-2](14) -QGate["not"](11) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](11) -QGate["not"](11) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](11) -QRot["exp(-i%Z)",9.8174770424681035e-2](14) -QGate["not"](12) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](12) -QGate["not"](12) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](12) -QRot["exp(-i%Z)",1.9634954084936207e-1](14) -QGate["not"](13) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](13) -QGate["not"](13) with controls=[+14] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](13) -QRot["exp(-i%Z)",3.9269908169872414e-1](14) -QGate["H"](14) with nocontrol -QGate["not"](3) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-1.917475984857051e-4](3) -QGate["not"](3) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",1.9174759848570515e-4](3) -QRot["exp(-i%Z)",1.9174759848570515e-4](15) -QGate["not"](4) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-3.834951969714103e-4](4) -QGate["not"](4) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",3.8349519697141029e-4](4) -QRot["exp(-i%Z)",3.8349519697141029e-4](15) -QGate["not"](5) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-7.669903939428206e-4](5) -QGate["not"](5) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",7.6699039394282058e-4](5) -QRot["exp(-i%Z)",7.6699039394282058e-4](15) -QGate["not"](6) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-1.533980787885641e-3](6) -QGate["not"](6) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",1.5339807878856412e-3](6) -QRot["exp(-i%Z)",1.5339807878856412e-3](15) -QGate["not"](7) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-3.067961575771282e-3](7) -QGate["not"](7) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",3.0679615757712823e-3](7) -QRot["exp(-i%Z)",3.0679615757712823e-3](15) -QGate["not"](8) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-6.135923151542565e-3](8) -QGate["not"](8) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",6.1359231515425647e-3](8) -QRot["exp(-i%Z)",6.1359231515425647e-3](15) -QGate["not"](9) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-1.227184630308513e-2](9) -QGate["not"](9) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",1.2271846303085129e-2](9) -QRot["exp(-i%Z)",1.2271846303085129e-2](15) -QGate["not"](10) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-2.454369260617026e-2](10) -QGate["not"](10) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",2.4543692606170259e-2](10) -QRot["exp(-i%Z)",2.4543692606170259e-2](15) -QGate["not"](11) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-4.908738521234052e-2](11) -QGate["not"](11) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",4.9087385212340517e-2](11) -QRot["exp(-i%Z)",4.9087385212340517e-2](15) -QGate["not"](12) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-9.817477042468103e-2](12) -QGate["not"](12) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",9.8174770424681035e-2](12) -QRot["exp(-i%Z)",9.8174770424681035e-2](15) -QGate["not"](13) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",-1.963495408493621e-1](13) -QGate["not"](13) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",1.9634954084936207e-1](13) -QRot["exp(-i%Z)",1.9634954084936207e-1](15) -QGate["not"](14) with 
controls=[+15] with nocontrol -QRot["exp(-i%Z)",-3.926990816987241e-1](14) -QGate["not"](14) with controls=[+15] with nocontrol -QRot["exp(-i%Z)",3.9269908169872414e-1](14) -QRot["exp(-i%Z)",3.9269908169872414e-1](15) -QGate["H"](15) with nocontrol -Comment["EXIT: qft_big_endian"](0:"qs[15]", 1:"qs[14]", 2:"qs[13]", 3:"qs[12]", 4:"qs[11]", 5:"qs[10]", 6:"qs[9]", 7:"qs[8]", 8:"qs[7]", 9:"qs[6]", 10:"qs[5]", 11:"qs[4]", 12:"qs[3]", 13:"qs[2]", 14:"qs[1]", 15:"qs[0]") -Outputs: 0:Qbit, 1:Qbit, 2:Qbit, 3:Qbit, 4:Qbit, 5:Qbit, 6:Qbit, 7:Qbit, 8:Qbit, 9:Qbit, 10:Qbit, 11:Qbit, 12:Qbit, 13:Qbit, 14:Qbit, 15:Qbit diff --git a/circuits/Fast/Adder8_before b/circuits/benchmarking_circuits/Fast/before/Adder8 similarity index 100% rename from circuits/Fast/Adder8_before rename to circuits/benchmarking_circuits/Fast/before/Adder8 diff --git a/circuits/Fast/QFT8_before b/circuits/benchmarking_circuits/Fast/before/QFT8 similarity index 100% rename from circuits/Fast/QFT8_before rename to circuits/benchmarking_circuits/Fast/before/QFT8 diff --git a/circuits/Fast/QFTAdd8_before b/circuits/benchmarking_circuits/Fast/before/QFTAdd8 similarity index 100% rename from circuits/Fast/QFTAdd8_before rename to circuits/benchmarking_circuits/Fast/before/QFTAdd8 diff --git a/circuits/Fast/adder_8_before b/circuits/benchmarking_circuits/Fast/before/adder_8 similarity index 100% rename from circuits/Fast/adder_8_before rename to circuits/benchmarking_circuits/Fast/before/adder_8 diff --git a/circuits/Fast/barenco_tof_10_before b/circuits/benchmarking_circuits/Fast/before/barenco_tof_10 similarity index 100% rename from circuits/Fast/barenco_tof_10_before rename to circuits/benchmarking_circuits/Fast/before/barenco_tof_10 diff --git a/circuits/Fast/barenco_tof_3_before b/circuits/benchmarking_circuits/Fast/before/barenco_tof_3 similarity index 100% rename from circuits/Fast/barenco_tof_3_before rename to circuits/benchmarking_circuits/Fast/before/barenco_tof_3 diff --git a/circuits/Fast/barenco_tof_4_before b/circuits/benchmarking_circuits/Fast/before/barenco_tof_4 similarity index 100% rename from circuits/Fast/barenco_tof_4_before rename to circuits/benchmarking_circuits/Fast/before/barenco_tof_4 diff --git a/circuits/Fast/barenco_tof_5_before b/circuits/benchmarking_circuits/Fast/before/barenco_tof_5 similarity index 100% rename from circuits/Fast/barenco_tof_5_before rename to circuits/benchmarking_circuits/Fast/before/barenco_tof_5 diff --git a/circuits/Fast/csla_mux_3_original_before b/circuits/benchmarking_circuits/Fast/before/csla_mux_3_original similarity index 100% rename from circuits/Fast/csla_mux_3_original_before rename to circuits/benchmarking_circuits/Fast/before/csla_mux_3_original diff --git a/circuits/Fast/csum_mux_9_corrected_before b/circuits/benchmarking_circuits/Fast/before/csum_mux_9_corrected similarity index 100% rename from circuits/Fast/csum_mux_9_corrected_before rename to circuits/benchmarking_circuits/Fast/before/csum_mux_9_corrected diff --git a/circuits/Fast/gf2^4_mult_before b/circuits/benchmarking_circuits/Fast/before/gf2^4_mult similarity index 100% rename from circuits/Fast/gf2^4_mult_before rename to circuits/benchmarking_circuits/Fast/before/gf2^4_mult diff --git a/circuits/Fast/gf2^5_mult_before b/circuits/benchmarking_circuits/Fast/before/gf2^5_mult similarity index 100% rename from circuits/Fast/gf2^5_mult_before rename to circuits/benchmarking_circuits/Fast/before/gf2^5_mult diff --git a/circuits/Fast/gf2^6_mult_before b/circuits/benchmarking_circuits/Fast/before/gf2^6_mult 
similarity index 100% rename from circuits/Fast/gf2^6_mult_before rename to circuits/benchmarking_circuits/Fast/before/gf2^6_mult diff --git a/circuits/Fast/gf2^7_mult_before b/circuits/benchmarking_circuits/Fast/before/gf2^7_mult similarity index 100% rename from circuits/Fast/gf2^7_mult_before rename to circuits/benchmarking_circuits/Fast/before/gf2^7_mult diff --git a/circuits/Fast/gf2^8_mult_before b/circuits/benchmarking_circuits/Fast/before/gf2^8_mult similarity index 100% rename from circuits/Fast/gf2^8_mult_before rename to circuits/benchmarking_circuits/Fast/before/gf2^8_mult diff --git a/circuits/Fast/grover_5.qc b/circuits/benchmarking_circuits/Fast/before/grover_5.qc similarity index 100% rename from circuits/Fast/grover_5.qc rename to circuits/benchmarking_circuits/Fast/before/grover_5.qc diff --git a/circuits/Fast/ham15-low.qc b/circuits/benchmarking_circuits/Fast/before/ham15-low.qc similarity index 100% rename from circuits/Fast/ham15-low.qc rename to circuits/benchmarking_circuits/Fast/before/ham15-low.qc diff --git a/circuits/Fast/hwb6.qc b/circuits/benchmarking_circuits/Fast/before/hwb6.qc similarity index 100% rename from circuits/Fast/hwb6.qc rename to circuits/benchmarking_circuits/Fast/before/hwb6.qc diff --git a/circuits/Fast/mod5_4_before b/circuits/benchmarking_circuits/Fast/before/mod5_4 similarity index 100% rename from circuits/Fast/mod5_4_before rename to circuits/benchmarking_circuits/Fast/before/mod5_4 diff --git a/circuits/Fast/mod_mult_55_before b/circuits/benchmarking_circuits/Fast/before/mod_mult_55 similarity index 100% rename from circuits/Fast/mod_mult_55_before rename to circuits/benchmarking_circuits/Fast/before/mod_mult_55 diff --git a/circuits/Fast/mod_red_21_before b/circuits/benchmarking_circuits/Fast/before/mod_red_21 similarity index 100% rename from circuits/Fast/mod_red_21_before rename to circuits/benchmarking_circuits/Fast/before/mod_red_21 diff --git a/circuits/Fast/qcla_adder_10_before b/circuits/benchmarking_circuits/Fast/before/qcla_adder_10 similarity index 100% rename from circuits/Fast/qcla_adder_10_before rename to circuits/benchmarking_circuits/Fast/before/qcla_adder_10 diff --git a/circuits/Fast/qcla_com_7_before b/circuits/benchmarking_circuits/Fast/before/qcla_com_7 similarity index 100% rename from circuits/Fast/qcla_com_7_before rename to circuits/benchmarking_circuits/Fast/before/qcla_com_7 diff --git a/circuits/Fast/qcla_mod_7_before b/circuits/benchmarking_circuits/Fast/before/qcla_mod_7 similarity index 100% rename from circuits/Fast/qcla_mod_7_before rename to circuits/benchmarking_circuits/Fast/before/qcla_mod_7 diff --git a/circuits/Fast/qft_4.qc b/circuits/benchmarking_circuits/Fast/before/qft_4.qc similarity index 100% rename from circuits/Fast/qft_4.qc rename to circuits/benchmarking_circuits/Fast/before/qft_4.qc diff --git a/circuits/Fast/rc_adder_6_before b/circuits/benchmarking_circuits/Fast/before/rc_adder_6 similarity index 100% rename from circuits/Fast/rc_adder_6_before rename to circuits/benchmarking_circuits/Fast/before/rc_adder_6 diff --git a/circuits/Fast/tof_10_before b/circuits/benchmarking_circuits/Fast/before/tof_10 similarity index 100% rename from circuits/Fast/tof_10_before rename to circuits/benchmarking_circuits/Fast/before/tof_10 diff --git a/circuits/Fast/tof_3_before b/circuits/benchmarking_circuits/Fast/before/tof_3 similarity index 100% rename from circuits/Fast/tof_3_before rename to circuits/benchmarking_circuits/Fast/before/tof_3 diff --git a/circuits/Fast/tof_4_before 
b/circuits/benchmarking_circuits/Fast/before/tof_4 similarity index 100% rename from circuits/Fast/tof_4_before rename to circuits/benchmarking_circuits/Fast/before/tof_4 diff --git a/circuits/Fast/tof_5_before b/circuits/benchmarking_circuits/Fast/before/tof_5 similarity index 100% rename from circuits/Fast/tof_5_before rename to circuits/benchmarking_circuits/Fast/before/tof_5 diff --git a/circuits/Fast/vbe_adder_3_before b/circuits/benchmarking_circuits/Fast/before/vbe_adder_3 similarity index 100% rename from circuits/Fast/vbe_adder_3_before rename to circuits/benchmarking_circuits/Fast/before/vbe_adder_3 diff --git a/circuits/Fast/Adder8_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/Adder8 similarity index 100% rename from circuits/Fast/Adder8_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/Adder8 diff --git a/circuits/Fast/QFT16_after b/circuits/benchmarking_circuits/Fast/nrscm/QFT16 similarity index 100% rename from circuits/Fast/QFT16_after rename to circuits/benchmarking_circuits/Fast/nrscm/QFT16 diff --git a/circuits/Fast/QFT8_after b/circuits/benchmarking_circuits/Fast/nrscm/QFT8 similarity index 100% rename from circuits/Fast/QFT8_after rename to circuits/benchmarking_circuits/Fast/nrscm/QFT8 diff --git a/circuits/Fast/QFTAdd8_after b/circuits/benchmarking_circuits/Fast/nrscm/QFTAdd8 similarity index 100% rename from circuits/Fast/QFTAdd8_after rename to circuits/benchmarking_circuits/Fast/nrscm/QFTAdd8 diff --git a/circuits/Fast/adder_8_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/adder_8 similarity index 100% rename from circuits/Fast/adder_8_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/adder_8 diff --git a/circuits/Fast/barenco_tof_10_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_10 similarity index 100% rename from circuits/Fast/barenco_tof_10_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_10 diff --git a/circuits/Fast/barenco_tof_3_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_3 similarity index 100% rename from circuits/Fast/barenco_tof_3_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_3 diff --git a/circuits/Fast/barenco_tof_4_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_4 similarity index 100% rename from circuits/Fast/barenco_tof_4_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_4 diff --git a/circuits/Fast/barenco_tof_5_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_5 similarity index 100% rename from circuits/Fast/barenco_tof_5_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/barenco_tof_5 diff --git a/circuits/Fast/csla_mux_3_original_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/csla_mux_3_original similarity index 100% rename from circuits/Fast/csla_mux_3_original_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/csla_mux_3_original diff --git a/circuits/Fast/csum_mux_9_corrected_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/csum_mux_9_corrected similarity index 100% rename from circuits/Fast/csum_mux_9_corrected_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/csum_mux_9_corrected diff --git a/circuits/Fast/gf2^4_mult_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/gf2^4_mult similarity index 100% rename from circuits/Fast/gf2^4_mult_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/gf2^4_mult diff --git a/circuits/Fast/gf2^5_mult_after_heavy 
b/circuits/benchmarking_circuits/Fast/nrscm/gf2^5_mult similarity index 100% rename from circuits/Fast/gf2^5_mult_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/gf2^5_mult diff --git a/circuits/Fast/gf2^6_mult_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/gf2^6_mult similarity index 100% rename from circuits/Fast/gf2^6_mult_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/gf2^6_mult diff --git a/circuits/Fast/gf2^7_mult_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/gf2^7_mult similarity index 100% rename from circuits/Fast/gf2^7_mult_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/gf2^7_mult diff --git a/circuits/Fast/gf2^8_mult_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/gf2^8_mult similarity index 100% rename from circuits/Fast/gf2^8_mult_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/gf2^8_mult diff --git a/circuits/Fast/mod5_4_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/mod5_4 similarity index 100% rename from circuits/Fast/mod5_4_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/mod5_4 diff --git a/circuits/Fast/mod_mult_55_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/mod_mult_55 similarity index 100% rename from circuits/Fast/mod_mult_55_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/mod_mult_55 diff --git a/circuits/Fast/mod_red_21_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/mod_red_21 similarity index 100% rename from circuits/Fast/mod_red_21_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/mod_red_21 diff --git a/circuits/Fast/qcla_adder_10_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/qcla_adder_10 similarity index 100% rename from circuits/Fast/qcla_adder_10_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/qcla_adder_10 diff --git a/circuits/Fast/qcla_com_7_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/qcla_com_7 similarity index 100% rename from circuits/Fast/qcla_com_7_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/qcla_com_7 diff --git a/circuits/Fast/qcla_mod_7_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/qcla_mod_7 similarity index 100% rename from circuits/Fast/qcla_mod_7_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/qcla_mod_7 diff --git a/circuits/Fast/rc_adder_6_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/rc_adder_6 similarity index 100% rename from circuits/Fast/rc_adder_6_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/rc_adder_6 diff --git a/circuits/Fast/tof_10_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/tof_10 similarity index 100% rename from circuits/Fast/tof_10_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/tof_10 diff --git a/circuits/Fast/tof_3_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/tof_3 similarity index 100% rename from circuits/Fast/tof_3_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/tof_3 diff --git a/circuits/Fast/tof_4_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/tof_4 similarity index 100% rename from circuits/Fast/tof_4_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/tof_4 diff --git a/circuits/Fast/tof_5_after_heavy b/circuits/benchmarking_circuits/Fast/nrscm/tof_5 similarity index 100% rename from circuits/Fast/tof_5_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/tof_5 diff --git a/circuits/Fast/vbe_adder_3_after_heavy 
b/circuits/benchmarking_circuits/Fast/nrscm/vbe_adder_3 similarity index 100% rename from circuits/Fast/vbe_adder_3_after_heavy rename to circuits/benchmarking_circuits/Fast/nrscm/vbe_adder_3 diff --git a/circuits/Fast/adder_8_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/adder_8.qc similarity index 100% rename from circuits/Fast/adder_8_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/adder_8.qc diff --git a/circuits/Fast/barenco_tof_10_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/barenco_tof_10.qc similarity index 100% rename from circuits/Fast/barenco_tof_10_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/barenco_tof_10.qc diff --git a/circuits/Fast/barenco_tof_3_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/barenco_tof_3.qc similarity index 100% rename from circuits/Fast/barenco_tof_3_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/barenco_tof_3.qc diff --git a/circuits/Fast/barenco_tof_4_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/barenco_tof_4.qc similarity index 100% rename from circuits/Fast/barenco_tof_4_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/barenco_tof_4.qc diff --git a/circuits/Fast/barenco_tof_5_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/barenco_tof_5.qc similarity index 100% rename from circuits/Fast/barenco_tof_5_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/barenco_tof_5.qc diff --git a/circuits/Fast/csla_mux_3_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/csla_mux_3.qc similarity index 100% rename from circuits/Fast/csla_mux_3_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/csla_mux_3.qc diff --git a/circuits/Fast/csum_mux_9_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/csum_mux_9.qc similarity index 100% rename from circuits/Fast/csum_mux_9_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/csum_mux_9.qc diff --git a/circuits/Fast/gf2^4_mult_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/gf2^4_mult.qc similarity index 100% rename from circuits/Fast/gf2^4_mult_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/gf2^4_mult.qc diff --git a/circuits/Fast/gf2^5_mult_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/gf2^5_mult.qc similarity index 100% rename from circuits/Fast/gf2^5_mult_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/gf2^5_mult.qc diff --git a/circuits/Fast/gf2^6_mult_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/gf2^6_mult.qc similarity index 100% rename from circuits/Fast/gf2^6_mult_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/gf2^6_mult.qc diff --git a/circuits/Fast/gf2^7_mult_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/gf2^7_mult.qc similarity index 100% rename from circuits/Fast/gf2^7_mult_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/gf2^7_mult.qc diff --git a/circuits/Fast/gf2^8_mult_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/gf2^8_mult.qc similarity index 100% rename from circuits/Fast/gf2^8_mult_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/gf2^8_mult.qc diff --git a/circuits/Fast/grover_5_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/grover_5.qc similarity index 100% rename from circuits/Fast/grover_5_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/grover_5.qc diff --git a/circuits/Fast/mod5_4_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/mod5_4.qc similarity index 100% rename from circuits/Fast/mod5_4_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/mod5_4.qc diff --git a/circuits/Fast/mod_mult_55_tpar.qc 
b/circuits/benchmarking_circuits/Fast/tpar/mod_mult_55.qc similarity index 100% rename from circuits/Fast/mod_mult_55_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/mod_mult_55.qc diff --git a/circuits/Fast/mod_red_21_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/mod_red_21.qc similarity index 100% rename from circuits/Fast/mod_red_21_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/mod_red_21.qc diff --git a/circuits/Fast/qcla_adder_10_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/qcla_adder_10.qc similarity index 100% rename from circuits/Fast/qcla_adder_10_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/qcla_adder_10.qc diff --git a/circuits/Fast/qcla_com_7_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/qcla_com_7.qc similarity index 100% rename from circuits/Fast/qcla_com_7_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/qcla_com_7.qc diff --git a/circuits/Fast/qcla_mod_7_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/qcla_mod_7.qc similarity index 100% rename from circuits/Fast/qcla_mod_7_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/qcla_mod_7.qc diff --git a/circuits/Fast/rc_adder_6_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/rc_adder_6.qc similarity index 100% rename from circuits/Fast/rc_adder_6_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/rc_adder_6.qc diff --git a/circuits/Fast/tof_10_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/tof_10.qc similarity index 100% rename from circuits/Fast/tof_10_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/tof_10.qc diff --git a/circuits/Fast/tof_3_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/tof_3.qc similarity index 100% rename from circuits/Fast/tof_3_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/tof_3.qc diff --git a/circuits/Fast/tof_4_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/tof_4.qc similarity index 100% rename from circuits/Fast/tof_4_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/tof_4.qc diff --git a/circuits/Fast/tof_5_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/tof_5.qc similarity index 100% rename from circuits/Fast/tof_5_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/tof_5.qc diff --git a/circuits/Fast/vbe_adder_3_tpar.qc b/circuits/benchmarking_circuits/Fast/tpar/vbe_adder_3.qc similarity index 100% rename from circuits/Fast/vbe_adder_3_tpar.qc rename to circuits/benchmarking_circuits/Fast/tpar/vbe_adder_3.qc diff --git a/circuits/Slow/Adder16_before b/circuits/benchmarking_circuits/Slow/before/Adder16 similarity index 100% rename from circuits/Slow/Adder16_before rename to circuits/benchmarking_circuits/Slow/before/Adder16 diff --git a/circuits/Slow/Adder32_before b/circuits/benchmarking_circuits/Slow/before/Adder32 similarity index 100% rename from circuits/Slow/Adder32_before rename to circuits/benchmarking_circuits/Slow/before/Adder32 diff --git a/circuits/Slow/Adder64_before b/circuits/benchmarking_circuits/Slow/before/Adder64 similarity index 100% rename from circuits/Slow/Adder64_before rename to circuits/benchmarking_circuits/Slow/before/Adder64 diff --git a/circuits/Slow/QFT32_before b/circuits/benchmarking_circuits/Slow/before/QFT32 similarity index 100% rename from circuits/Slow/QFT32_before rename to circuits/benchmarking_circuits/Slow/before/QFT32 diff --git a/circuits/Slow/QFTAdd16_before b/circuits/benchmarking_circuits/Slow/before/QFTAdd16 similarity index 100% rename from circuits/Slow/QFTAdd16_before rename to 
circuits/benchmarking_circuits/Slow/before/QFTAdd16 diff --git a/circuits/Slow/QFTAdd32_before b/circuits/benchmarking_circuits/Slow/before/QFTAdd32 similarity index 100% rename from circuits/Slow/QFTAdd32_before rename to circuits/benchmarking_circuits/Slow/before/QFTAdd32 diff --git a/circuits/Fast/gf2^10_mult_before b/circuits/benchmarking_circuits/Slow/before/gf2^10_mult similarity index 100% rename from circuits/Fast/gf2^10_mult_before rename to circuits/benchmarking_circuits/Slow/before/gf2^10_mult diff --git a/circuits/Slow/gf2^16_mult_before b/circuits/benchmarking_circuits/Slow/before/gf2^16_mult similarity index 100% rename from circuits/Slow/gf2^16_mult_before rename to circuits/benchmarking_circuits/Slow/before/gf2^16_mult diff --git a/circuits/Fast/gf2^9_mult_before b/circuits/benchmarking_circuits/Slow/before/gf2^9_mult similarity index 100% rename from circuits/Fast/gf2^9_mult_before rename to circuits/benchmarking_circuits/Slow/before/gf2^9_mult diff --git a/circuits/Slow/ham15-high.qc b/circuits/benchmarking_circuits/Slow/before/ham15-high.qc similarity index 100% rename from circuits/Slow/ham15-high.qc rename to circuits/benchmarking_circuits/Slow/before/ham15-high.qc diff --git a/circuits/Slow/ham15-med.qc b/circuits/benchmarking_circuits/Slow/before/ham15-med.qc similarity index 100% rename from circuits/Slow/ham15-med.qc rename to circuits/benchmarking_circuits/Slow/before/ham15-med.qc diff --git a/circuits/Slow/hwb8.qc b/circuits/benchmarking_circuits/Slow/before/hwb8.qc similarity index 100% rename from circuits/Slow/hwb8.qc rename to circuits/benchmarking_circuits/Slow/before/hwb8.qc diff --git a/circuits/Slow/mod_adder_1024_before b/circuits/benchmarking_circuits/Slow/before/mod_adder_1024 similarity index 100% rename from circuits/Slow/mod_adder_1024_before rename to circuits/benchmarking_circuits/Slow/before/mod_adder_1024 diff --git a/circuits/Fast/nth_prime6.tfc b/circuits/benchmarking_circuits/Slow/before/nth_prime6.tfc similarity index 100% rename from circuits/Fast/nth_prime6.tfc rename to circuits/benchmarking_circuits/Slow/before/nth_prime6.tfc diff --git a/circuits/Slow/nth_prime8.tfc b/circuits/benchmarking_circuits/Slow/before/nth_prime8.tfc similarity index 100% rename from circuits/Slow/nth_prime8.tfc rename to circuits/benchmarking_circuits/Slow/before/nth_prime8.tfc diff --git a/circuits/Slow/Adder16_after_heavy b/circuits/benchmarking_circuits/Slow/nrscm/Adder16 similarity index 100% rename from circuits/Slow/Adder16_after_heavy rename to circuits/benchmarking_circuits/Slow/nrscm/Adder16 diff --git a/circuits/Slow/Adder32_after_heavy b/circuits/benchmarking_circuits/Slow/nrscm/Adder32 similarity index 100% rename from circuits/Slow/Adder32_after_heavy rename to circuits/benchmarking_circuits/Slow/nrscm/Adder32 diff --git a/circuits/Slow/Adder64_after_heavy b/circuits/benchmarking_circuits/Slow/nrscm/Adder64 similarity index 100% rename from circuits/Slow/Adder64_after_heavy rename to circuits/benchmarking_circuits/Slow/nrscm/Adder64 diff --git a/circuits/Slow/QFT32_after b/circuits/benchmarking_circuits/Slow/nrscm/QFT32 similarity index 100% rename from circuits/Slow/QFT32_after rename to circuits/benchmarking_circuits/Slow/nrscm/QFT32 diff --git a/circuits/Slow/QFTAdd16_after b/circuits/benchmarking_circuits/Slow/nrscm/QFTAdd16 similarity index 100% rename from circuits/Slow/QFTAdd16_after rename to circuits/benchmarking_circuits/Slow/nrscm/QFTAdd16 diff --git a/circuits/Slow/QFTAdd32_after b/circuits/benchmarking_circuits/Slow/nrscm/QFTAdd32 
similarity index 100% rename from circuits/Slow/QFTAdd32_after rename to circuits/benchmarking_circuits/Slow/nrscm/QFTAdd32 diff --git a/circuits/Fast/gf2^10_mult_after_heavy b/circuits/benchmarking_circuits/Slow/nrscm/gf2^10_mult similarity index 100% rename from circuits/Fast/gf2^10_mult_after_heavy rename to circuits/benchmarking_circuits/Slow/nrscm/gf2^10_mult diff --git a/circuits/Slow/gf2^16_mult_after_heavy b/circuits/benchmarking_circuits/Slow/nrscm/gf2^16_mult similarity index 100% rename from circuits/Slow/gf2^16_mult_after_heavy rename to circuits/benchmarking_circuits/Slow/nrscm/gf2^16_mult diff --git a/circuits/Fast/gf2^9_mult_after_heavy b/circuits/benchmarking_circuits/Slow/nrscm/gf2^9_mult similarity index 100% rename from circuits/Fast/gf2^9_mult_after_heavy rename to circuits/benchmarking_circuits/Slow/nrscm/gf2^9_mult diff --git a/circuits/Slow/mod_adder_1024_after_heavy b/circuits/benchmarking_circuits/Slow/nrscm/mod_adder_1024 similarity index 100% rename from circuits/Slow/mod_adder_1024_after_heavy rename to circuits/benchmarking_circuits/Slow/nrscm/mod_adder_1024 diff --git a/circuits/Fast/gf2^10_mult_tpar.qc b/circuits/benchmarking_circuits/Slow/tpar/gf2^10_mult.qc similarity index 100% rename from circuits/Fast/gf2^10_mult_tpar.qc rename to circuits/benchmarking_circuits/Slow/tpar/gf2^10_mult.qc diff --git a/circuits/Slow/gf2^16_mult_tpar.qc b/circuits/benchmarking_circuits/Slow/tpar/gf2^16_mult.qc similarity index 100% rename from circuits/Slow/gf2^16_mult_tpar.qc rename to circuits/benchmarking_circuits/Slow/tpar/gf2^16_mult.qc diff --git a/circuits/Fast/gf2^9_mult_tpar.qc b/circuits/benchmarking_circuits/Slow/tpar/gf2^9_mult.qc similarity index 100% rename from circuits/Fast/gf2^9_mult_tpar.qc rename to circuits/benchmarking_circuits/Slow/tpar/gf2^9_mult.qc diff --git a/circuits/Slow/mod_adder_1024_tpar.qc b/circuits/benchmarking_circuits/Slow/tpar/mod_adder_1024.qc similarity index 100% rename from circuits/Slow/mod_adder_1024_tpar.qc rename to circuits/benchmarking_circuits/Slow/tpar/mod_adder_1024.qc diff --git a/demos/AllFeatures.ipynb b/demos/AllFeatures.ipynb index af029675..5875ffa8 100644 --- a/demos/AllFeatures.ipynb +++ b/demos/AllFeatures.ipynb @@ -16,7 +16,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -40,11 +40,11 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ - "fname = os.path.join('..','circuits','Fast', 'mod5_4_before')\n", + "fname = os.path.join('..','circuits','benchmarking_circuits', 'Fast', 'before', 'mod5_4')\n", "circ = zx.Circuit.load(fname)\n", "# Alternatively we could have done:\n", "# circ = zx.Circuit.from_quipper_file(fname)\n", @@ -60,7 +60,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -76,7 +76,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -92,7 +92,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -108,7 +108,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -124,7 +124,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -143,7 +143,7 @@ }, { "cell_type": "code", - "execution_count": 
null, + "execution_count": 8, "metadata": {}, "outputs": [], "source": [ @@ -152,7 +152,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -161,7 +161,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, "outputs": [], "source": [ @@ -181,7 +181,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ @@ -200,7 +200,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -217,7 +217,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, "outputs": [], "source": [ @@ -239,7 +239,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ @@ -305,11 +305,11 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ - "fname = os.path.join('..','circuits','Fast', 'mod5_4_before')\n", + "fname = os.path.join('..','circuits','benchmarking_circuits', 'Fast', 'before', 'mod5_4')\n", "circ = zx.Circuit.load(fname).to_basic_gates()\n", "print(\"original T-count:\", zx.tcount(circ))\n", "zx.draw(circ)" @@ -324,7 +324,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -350,7 +350,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -374,7 +374,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -392,7 +392,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": {}, "outputs": [], "source": [ @@ -413,7 +413,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ @@ -434,7 +434,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -459,7 +459,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ @@ -488,18 +488,18 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "For extracting circuits out of ZX-graphs there is only a single method in PyZX that you have to call: ``zx.extract_circuit``. This method should always work *when dealing with graphs produced by ``full_reduce``.* There is no guarantee that it can extract circuits from arbitrary ZX-diagrams.\n", + "For extracting circuits out of ZX-graphs there is a method in PyZX called: ``zx.extract_circuit``. 
This method should always work *when dealing with graphs produced by ``full_reduce``.* There is no guarantee that it can extract circuits from arbitrary ZX-diagrams.\n", "\n", "Let's see what this method does when applied to the circuit from the previous section:" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ - "fname = os.path.join('..','circuits','Fast', 'mod5_4_before')\n", + "fname = os.path.join('..','circuits','benchmarking_circuits', 'Fast', 'before', 'mod5_4')\n", "circ = zx.Circuit.load(fname).to_basic_gates()\n", "print(\"The original circuit:\")\n", "zx.draw(circ)\n", @@ -526,7 +526,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 24, "metadata": {}, "outputs": [], "source": [ @@ -545,7 +545,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -568,7 +568,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -589,7 +589,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 27, "metadata": {}, "outputs": [], "source": [ @@ -608,7 +608,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": {}, "outputs": [], "source": [ @@ -624,7 +624,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ @@ -650,15 +650,16 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 30, "metadata": {}, "outputs": [], "source": [ - "fname = os.path.join('..','circuits','Fast', 'mod5_4_before')\n", + "fname = os.path.join('..','circuits','benchmarking_circuits', 'Fast', 'before', 'mod5_4')\n", "circ = zx.Circuit.load(fname).to_basic_gates()\n", "print(\"The original circuit:\")\n", "zx.draw(circ)\n", - "g = zx.simplify.teleport_reduce(circ.to_graph(),quiet=True)\n", + "g = circ.to_graph()\n", + "zx.simplify.teleport_reduce(g)\n", "print(\"Circuit after phase teleportation:\")\n", "zx.draw(g)" ] @@ -672,26 +673,204 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 31, "metadata": {}, "outputs": [], "source": [ "zx.compare_tensors(circ, g)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Two-qubit gate count optimisation" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To illustrate how the previous strategy can in fact worsen the optimised circuit in terms of total gate count and two-qubit gate count, let's look at another, bigger, circuit:" + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 32, "metadata": {}, - "outputs": [], - "source": [] + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The original circuit:\n", + "Circuit csum_mux_9_corrected on 30 qubits with 448 gates.\n", + " 196 is the T-count\n", + " 252 Cliffords among which\n", + " 168 2-qubit gates (168 CNOT, 0 other) and\n", + " 56 Hadamard gates.\n", + "\n", + "The optimised circuit:\n", + "Circuit on 30 qubits with 496 gates.\n", + " 84 is the T-count\n", + " 412 Cliffords among which\n", + " 349 2-qubit gates (314 CNOT, 35 other) and\n", + " 28 Hadamard gates.\n" + ] + } + ], + "source": [ + "fname = os.path.join('..','circuits','benchmarking_circuits', 'Fast', 'before', 'csum_mux_9_corrected')\n", + "circ = zx.Circuit.load(fname).to_basic_gates()\n", + "print(\"The original 
circuit:\")\n", + "print(circ.stats(), end='\\n\\n')\n", + "\n", + "g = circ.to_graph()\n", + "zx.simplify.full_reduce(g,quiet=True)\n", + "g.normalize()\n", + "new_circ = zx.extract_circuit(g)\n", + "optimized_circ = zx.optimize.basic_optimization(new_circ.to_basic_gates(),do_swaps=False).to_basic_gates()\n", + "print(\"The optimised circuit:\")\n", + "print(optimized_circ.stats())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As you can see the both the total gate count and two-qubit gate counts have in fact *increased* from the original. The T-count has been significantly reduced, but we could have achieved this by using phase teleportation. Let's see if using ``phase_block_optimize`` can help us here:" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Circuit on 30 qubits with 539 gates.\n", + " 74 is the T-count\n", + " 465 Cliffords among which\n", + " 391 2-qubit gates (319 CNOT, 72 other) and\n", + " 28 Hadamard gates.\n" + ] + } + ], + "source": [ + "final_circ = zx.optimize.phase_block_optimize(optimized_circ).to_basic_gates()\n", + "final_circ = zx.optimize.basic_optimization(final_circ) # We call this again, as it does some extra processing\n", + "print(final_circ.stats())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As demonstrated here, ``phase_block_optimize`` always attempts to reduce T-count, but at the cost of many additional two-qubit gates. \n", + "\n", + "Let's look at a different strategy we can use, specifically with the goal of minimising the number of two-qubit gates (while maintaining the same T-count reductions).\n", + "\n", + "The first step is to apply phase teleportation to our circuit. This allows us to reap the rewards of ``full_reduce`` without the downside of increased gates after extraction." + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Circuit on 30 qubits with 364 gates.\n", + " 84 is the T-count\n", + " 280 Cliffords among which\n", + " 168 2-qubit gates (168 CNOT, 0 other) and\n", + " 56 Hadamard gates.\n" + ] + } + ], + "source": [ + "fname = os.path.join('..','circuits','benchmarking_circuits', 'Fast', 'before', 'csum_mux_9_corrected')\n", + "circ = zx.Circuit.load(fname).to_basic_gates()\n", + "g = circ.to_graph()\n", + "zx.simplify.teleport_reduce(g)\n", + "print(zx.Circuit.from_graph(g).stats())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can then apply a method which *selectively* applies only the transformations of ``full_reduce`` which reduce the two-qubit gate count of the resulting circuit. To do this we must first transform the diagram into what is called graph-like form. This allows us to evaluate graph theoretic properties of the diagram. In particular if we assert that all transformations preserve a property called *cflow*, then instead of the usual extraction function we can use a method called ``simple_extract``. \n", + "\n", + "The method which applies the selective simplification of our ZX-diagram is called ``flow_2Q_simp``. 
The full details can be found in [Causal flow preserving optimisation of quantum circuits in the ZX-calculus](https://arxiv.org/abs/2312.02793).\n", + "\n", + "Let's apply it and see how it does:" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Circuit on 30 qubits with 288 gates.\n", + " 84 is the T-count\n", + " 204 Cliffords among which\n", + " 140 2-qubit gates (125 CNOT, 15 other) and\n", + " 28 Hadamard gates.\n" + ] + } + ], + "source": [ + "zx.to_graph_like(g)\n", + "zx.flow_2Q_simp(g)\n", + "g.normalize()\n", + "new_circ = zx.extract_simple(g)\n", + "optimized_circ = zx.optimize.basic_optimization(new_circ.to_basic_gates(),do_swaps=False).to_basic_gates()\n", + "print(optimized_circ.stats())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As you can see this significantly reduces the number of gates. This method will *never* increase the number of two-qubit gates. Let us finally confirm its equality:" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "circ.verify_equality(optimized_circ)" + ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "pyzx", "language": "python", - "name": "python3" + "name": "pyzx" }, "language_info": { "codemirror_mode": { @@ -703,7 +882,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.11.3" } }, "nbformat": 4, diff --git a/demos/Challenge_circuit_extraction.ipynb b/demos/Challenge_circuit_extraction.ipynb index 27617b20..18ae79bf 100644 --- a/demos/Challenge_circuit_extraction.ipynb +++ b/demos/Challenge_circuit_extraction.ipynb @@ -44,7 +44,7 @@ } ], "source": [ - "c = zx.Circuit.load('../circuits/Fast/tof_3_before').to_basic_gates()\n", + "c = zx.Circuit.load('../circuits/benchmarking_circuits/Fast/before/tof_3').to_basic_gates()\n", "g = c.to_graph()\n", "g1 = g.copy()\n", "zx.simplify.full_reduce(g1)\n", @@ -674,20 +674,13 @@ "source": [ "zx.draw(g2)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "pyzx", "language": "python", - "name": "python3" + "name": "pyzx" }, "language_info": { "codemirror_mode": { @@ -699,7 +692,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.11.3" } }, "nbformat": 4, diff --git a/demos/Circuit Optimisation/benchmarking_demo.ipynb b/demos/Circuit Optimisation/benchmarking_demo.ipynb new file mode 100644 index 00000000..d121c80c --- /dev/null +++ b/demos/Circuit Optimisation/benchmarking_demo.ipynb @@ -0,0 +1,3163 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import sys, os, time\n", + "sys.path.append('../..')\n", + "import pyzx as zx\n", + "from benchmarking import Benchmark" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The ``Benchmark`` class provides a convenient wrapper in order to compare different circuit optimisation methods. To use it, we first create our benchmark object. 
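For reference, the flow-preserving two-qubit optimisation pipeline assembled step by step in the cells above can be collected into one short script. This is a minimal sketch rather than part of the notebook: the circuit path is illustrative, and the same sequence of calls reappears below as the flow_opt strategy that is passed to the benchmark.

import os
import pyzx as zx

# Illustrative input; any circuit file that zx.Circuit.load understands will do.
fname = os.path.join('circuits', 'benchmarking_circuits', 'Fast', 'before', 'csum_mux_9_corrected')
circ = zx.Circuit.load(fname).to_basic_gates()
print(circ.stats())

g = circ.to_graph()
zx.simplify.teleport_reduce(g)   # phase teleportation: lowers the T-count without touching the gate structure
zx.to_graph_like(g)              # bring the diagram into graph-like form
zx.flow_2Q_simp(g)               # apply only the rewrites that reduce the two-qubit gate count
g.normalize()
new_circ = zx.extract_simple(g)  # extraction that relies on cflow being preserved

optimised = zx.optimize.basic_optimization(new_circ.to_basic_gates(), do_swaps=False).to_basic_gates()
print(optimised.stats())
print(circ.verify_equality(optimised))  # True if the optimised circuit could be verified against the original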
\n", + "\n", + "If a previous object has been saved (using ``Benchmark.save('dirpath')``), we can load this directly from the directory." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "b = Benchmark()\n", + "# b = Benchmark(dirpath='benchmarking_demo_results')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can then load the set of circuits which we want to benchmark. \n", + "\n", + "If we have a set of already optimised circuits via a different routine, we can also load these. \n", + "\n", + "Here we load sets of optimised circuits by the following two routines:\n", + "\n", + "* NRSCM - [Nam, Ross, Su, Childs, Maslov - Automated optimization of large quantum circuits with continuous parameters](https://www.nature.com/articles/s41534-018-0072-4).\n", + "* TPar - [Amy, Maslov, Mosca - Polynomial-time T-depth Optimization of Clifford+T circuits via Matroid Partitioning](https://arxiv.org/abs/1303.2042)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Circuit attributes: ['Qubits', 'Gates', '2Q Count', 'T Count', 't_opt']\n", + "No loaded functions\n", + "Loaded routines: ['TPar', 'NRSCM']\n", + "Loaded circuit groups: ['fast']\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
OriginalNRSCMTPar
fastYYY
\n", + "
" + ], + "text/plain": [ + " Original NRSCM TPar\n", + "fast Y Y Y" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "b.load_circuits(dirname=os.path.join('..', '..', 'circuits', 'benchmarking_circuits', 'Fast', 'before'), group_name='fast')\n", + "b.load_circuits(dirname=os.path.join('..', '..', 'circuits', 'benchmarking_circuits', 'Fast', 'nrscm'), group_name='fast', simp_strategy='NRSCM')\n", + "b.load_circuits(dirname=os.path.join('..', '..', 'circuits', 'benchmarking_circuits', 'Fast', 'tpar'), group_name='fast', simp_strategy='TPar')\n", + "b.show_attributes()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can then define the optimisation functions which we want to benchmark. These should take a circuit as input and output the optimised circuit." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "def basic_optimise(c):\n", + " c1 = zx.basic_optimization(c.copy(), do_swaps=False).to_basic_gates()\n", + " c2 = zx.basic_optimization(c.copy(), do_swaps=True).to_basic_gates()\n", + " if c2.twoqubitcount() < c1.twoqubitcount(): return c2 # As this optimisation algorithm is targetted at reducting H-gates, we use the circuit with the smaller 2-qubit gate count here, either using SWAP rules or not.\n", + " return c1\n", + "\n", + "def clifford_simp(c):\n", + " g = c.to_graph()\n", + " zx.clifford_simp(g, quiet=True)\n", + " c2 = zx.extract_circuit(g).to_basic_gates()\n", + " return basic_optimise(c2)\n", + "\n", + "def full_reduce(c):\n", + " g = c.to_graph()\n", + " zx.full_reduce(g, quiet=True)\n", + " c2 = zx.extract_circuit(g).to_basic_gates()\n", + " return basic_optimise(c2)\n", + "\n", + "def flow_opt(c):\n", + " g = c.to_graph()\n", + " zx.teleport_reduce(g)\n", + " zx.to_graph_like(g)\n", + " zx.flow_2Q_simp(g)\n", + " c2 = zx.extract_simple(g).to_basic_gates()\n", + " return basic_optimise(c2)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "These functions can then be added to the benchmark object. If desired they can also be deleted using ``b.del_simplification_funcs(['func'])``.\n", + "\n", + "They can either be run immediately on a group of circuits (default is all), or run afterwards.\n", + "\n", + "The optimised circuits can optionally be verified to equal the original circuit (by using full_reduce on the original + adjoint)." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Processing full-reduce on tof_3 : 0%| | 0/31 [00:00\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
OriginalNRSCMTParcliff-simpflow-optfull-reduce
fastYYYYYY
\n", + "" + ], + "text/plain": [ + " Original NRSCM TPar cliff-simp flow-opt full-reduce\n", + "fast Y Y Y Y Y Y" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "b.add_simplification_func(func=full_reduce, name='full-reduce', groups_to_run=['fast'], verify=True, rerun=False)\n", + "b.add_simplification_func(func=clifford_simp, name='cliff-simp', groups_to_run=['fast'], verify=True, rerun=False)\n", + "\n", + "b.add_simplification_func(func=flow_opt, name='flow-opt', groups_to_run=None)\n", + "b.run(funcs_to_run=['flow-opt'], groups_to_run=['fast'], verify=True, rerun=False)\n", + "\n", + "b.show_attributes()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can then view the results in a dataframe." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
 OriginalNRSCMTParcliff-simpflow-optfull-reduce
 Qubits2Q CountT Count2Q CountT Count2Q CountT Count2Q CountT Countt_opt2Q CountT Countt_opt2Q CountT Countt_opt
Circuits                
Adder8232432669456--2882280.21112561.47160560.39
QFT8856845642--72530.0256420.1978420.02
QFTAdd816184252184112--2211740.121751120.752631120.10
adder_8244093992912158852155203510.432771734.134281730.79
barenco_tof_10191922241301003281002281920.161461000.582011000.29
barenco_tof_3524281816541639240.0120160.0224160.01
barenco_tof_4748563428902867480.0137280.0657280.02
barenco_tof_59728450401324093720.0255400.1374400.03
csla_mux_3_original1580707064--124640.0373620.05164620.04
csum_mux_9_corrected3016819614084--1891680.12140840.40313840.21
gf2^4_mult1299112996832468112960.0394680.16341680.07
gf2^5_mult151541751541155351111701550.061461150.285461150.10
gf2^6_mult182212522211506491502462160.112091500.721,0171500.23
gf2^7_mult213003433002179922173293010.192832171.311,4372170.35
gf2^8_mult244054484052641,2562644483840.283832642.771,9622640.61
grover_59288336--317523322900.132231661.352841660.17
ham15-low17236161----3841470.13214973.22342970.18
hwb67116105----142950.03101750.29140750.05
mod5_4528282816481633220.012180.032780.01
mod_mult_559484940351613765430.0140350.0493350.02
mod_red_21111051197773290731431070.0483730.30159730.06
qcla_adder_10362332381831627371622932120.391821620.533621620.49
qcla_com_72418620313295496952361690.22133950.79251950.55
qcla_mod_7263824132922351,1502495093510.602962373.286232372.92
qft_454669----58670.0244670.0353670.02
rc_adder_6149377714723063126550.0471470.11122470.05
tof_10191021197071232711561030.0778710.17137710.07
tof_3518211415351525190.0015150.0121150.01
tof_4730352223632348310.0124230.0246230.01
tof_5942493031943166430.0133310.0555310.01
vbe_adder_310707050241142481560.0239240.1670240.02
\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "df = b.df(groups=['fast'], routines=['all'], funcs=['all'], atts=['Qubits','2Q Count','T Count', 't_opt'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can also generate graphs based on optimisations of randomly generated circuits." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Processing flow-opt on P_t = 0.15 : 100%|██████████| 11/11 [00:47<00:00, 4.35s/it]\n" + ] + }, + { + "data": { + "image/svg+xml": [ + "\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " 2024-01-17T21:37:10.001497\n", + " image/svg+xml\n", + " \n", + " \n", + " Matplotlib v3.7.2, https://matplotlib.org/\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n" + ], + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "fig = b.Pt_graphs(funcs=['full-reduce', 'flow-opt'], qubits=8, depth=400, cnot_prob=0.3, t_probs=[0.015*i for i in range(11)], ys=['2Q Count', 'T Count', 'Gates'], reps=20, overwrite=True, random_seed=42)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# b.save('benchmarking_demo_results')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "pyzx", + "language": "python", + "name": "pyzx" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/demos/Circuit Optimisation/benchmarking_demo_results/circuit_groups.pkl b/demos/Circuit Optimisation/benchmarking_demo_results/circuit_groups.pkl new file mode 100644 index 00000000..92aa2a5c Binary files /dev/null and b/demos/Circuit Optimisation/benchmarking_demo_results/circuit_groups.pkl differ diff --git a/demos/Circuit Optimisation/benchmarking_demo_results/circuits.pkl b/demos/Circuit Optimisation/benchmarking_demo_results/circuits.pkl new file mode 100644 index 00000000..fe83d163 Binary files /dev/null and b/demos/Circuit Optimisation/benchmarking_demo_results/circuits.pkl differ diff --git a/demos/Circuit Optimisation/benchmarking_demo_results/funcs.pkl b/demos/Circuit Optimisation/benchmarking_demo_results/funcs.pkl new file mode 100644 index 00000000..e4e8ce61 Binary files /dev/null and b/demos/Circuit Optimisation/benchmarking_demo_results/funcs.pkl differ diff --git a/demos/Circuit Optimisation/benchmarking_demo_results/rand_data.pkl b/demos/Circuit Optimisation/benchmarking_demo_results/rand_data.pkl new file mode 100644 index 00000000..c9f5743d Binary files /dev/null and b/demos/Circuit Optimisation/benchmarking_demo_results/rand_data.pkl differ diff --git a/demos/Circuit Optimisation/benchmarking_demo_results/routines.pkl b/demos/Circuit Optimisation/benchmarking_demo_results/routines.pkl new file mode 100644 index 00000000..7cb0e902 Binary files /dev/null and b/demos/Circuit Optimisation/benchmarking_demo_results/routines.pkl differ diff --git a/demos/Circuit Optimisation/flow_opt_benchmarking.ipynb b/demos/Circuit Optimisation/flow_opt_benchmarking.ipynb new file mode 100644 index 00000000..c5837b68 --- /dev/null +++ b/demos/Circuit Optimisation/flow_opt_benchmarking.ipynb @@ -0,0 +1,1615 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import sys, os, time\n", + "sys.path.append('../..')\n", + "import pyzx as zx\n", + "from benchmarking import Benchmark" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook demonstrates the results from [Causal flow preserving optimisation of quantum circuits in the ZX-calculus](https://arxiv.org/abs/2312.02793)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# b = Benchmark()\n", + "b = Benchmark(dirpath='flow_opt_results')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Circuit metrics are benchmarked against those of Nam, Ross, Su, Childs, Maslov (NRSCM) in [Automated optimization of large quantum circuits with continuous parameters](https://www.nature.com/articles/s41534-018-0072-4)." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Circuit attributes: ['Qubits', 'Gates', '2Q Count', 'T Count', 't_opt']\n", + "Loaded functions: ['flow-opt-g0', 'flow-opt-c0', 'flow-opt-c1', 'flow-opt-c2', 'flow-opt-c3', 'flow-opt-c4', 'flow-opt-c5']\n", + "Loaded routines: ['NRSCM']\n", + "Loaded circuit groups: ['fast']\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
OriginalNRSCMflow-opt-c0flow-opt-c1flow-opt-c2flow-opt-c3flow-opt-c4flow-opt-c5flow-opt-g0
fastYYYYYYYYY
\n", + "
" + ], + "text/plain": [ + " Original NRSCM flow-opt-c0 flow-opt-c1 flow-opt-c2 flow-opt-c3 \\\n", + "fast Y Y Y Y Y Y \n", + "\n", + " flow-opt-c4 flow-opt-c5 flow-opt-g0 \n", + "fast Y Y Y " + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "b.load_circuits(dirname=os.path.join('..', '..', 'circuits', 'benchmarking_circuits', 'Fast', 'before'), group_name='fast')\n", + "b.load_circuits(dirname=os.path.join('..', '..', 'circuits', 'benchmarking_circuits', 'Fast', 'nrscm'), group_name='fast', simp_strategy='NRSCM')\n", + "b.show_attributes()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 31/31 [00:00<00:00, 100326.72it/s]\n", + "100%|██████████| 31/31 [00:00<00:00, 677205.33it/s]\n", + "100%|██████████| 31/31 [00:00<00:00, 673696.50it/s]\n", + "100%|██████████| 31/31 [00:00<00:00, 274890.96it/s]\n", + "100%|██████████| 31/31 [00:00<00:00, 812646.40it/s]\n", + "100%|██████████| 31/31 [00:00<00:00, 935420.32it/s]\n", + "100%|██████████| 31/31 [00:00<00:00, 333393.39it/s]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Circuit attributes: ['Qubits', 'Gates', '2Q Count', 'T Count', 't_opt']\n", + "Loaded functions: ['flow-opt-g0', 'flow-opt-c0', 'flow-opt-c1', 'flow-opt-c2', 'flow-opt-c3', 'flow-opt-c4', 'flow-opt-c5']\n", + "Loaded routines: ['NRSCM']\n", + "Loaded circuit groups: ['fast']\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
OriginalNRSCMflow-opt-c0flow-opt-c1flow-opt-c2flow-opt-c3flow-opt-c4flow-opt-c5flow-opt-g0
fastYYYYYYYYY
\n", + "
" + ], + "text/plain": [ + " Original NRSCM flow-opt-c0 flow-opt-c1 flow-opt-c2 flow-opt-c3 \\\n", + "fast Y Y Y Y Y Y \n", + "\n", + " flow-opt-c4 flow-opt-c5 flow-opt-g0 \n", + "fast Y Y Y " + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "def basic_optimise(c):\n", + " c1 = zx.basic_optimization(c.copy(), do_swaps=False).to_basic_gates()\n", + " c2 = zx.basic_optimization(c.copy(), do_swaps=True).to_basic_gates()\n", + " if c2.twoqubitcount() < c1.twoqubitcount(): return c2 # As this optimisation algorithm is targetted at reducting H-gates, we use the circuit with the smaller 2-qubit gate count here, either using SWAP rules or not.\n", + " return c1\n", + "\n", + "for flow,smax in [('g',0), ('c',0), ('c',1), ('c',2), ('c',3), ('c',4), ('c',5)]:\n", + " def flow_opt(c):\n", + " g = c.to_graph()\n", + " zx.teleport_reduce(g)\n", + " zx.to_graph_like(g)\n", + " zx.flow_2Q_simp(g, cFlow=flow=='c', max_lc_unfusions=smax, max_p_unfusions=smax)\n", + " if flow == 'c': c2 = zx.extract_simple(g).to_basic_gates()\n", + " else: c2 = zx.extract_circuit(g).to_basic_gates()\n", + " return basic_optimise(c2)\n", + " \n", + " b.add_simplification_func(func=flow_opt, name=f'flow-opt-{flow}{smax}', groups_to_run=['fast'], verify=True, rerun=False)\n", + "\n", + "b.show_attributes()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", 
+ " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
 OriginalNRSCMflow-opt-c0flow-opt-c1flow-opt-c2flow-opt-c3flow-opt-c4flow-opt-c5flow-opt-g0
 Qubits2Q CountT Count2Q CountT Count2Q CountT Count2Q CountT Count2Q CountT Count2Q CountT Count2Q CountT Count2Q CountT Count2Q CountT Count
Circuits                   
Adder823243266945612456115561125610856108561085612456
QFT88568456425642564256425642484245424242
QFTAdd816184252184112176112176112175112174112149112135112165112
adder_824409399291215295173284173277173269173267173268173296173
barenco_tof_1019192224130100159100151100146100146100146100146100159100
barenco_tof_35242818162116211620162016201620162116
barenco_tof_47485634284228372837283728372837284228
barenco_tof_59728450406340574055405540554055406340
csla_mux_3_original15807070647462736273627362736273627462
csum_mux_9_corrected301681961408415284150841408414084140841408415184
gf2^4_mult129911299689968996894689468946894689868
gf2^5_mult15154175154115154115154115146115146115146115146115153115
gf2^6_mult18221252221150221150221150209150209150209150209150217150
gf2^7_mult21300343300217300217300217283217283217283217283217293217
gf2^8_mult24405448405264405264405264383264383264383264383264395264
grover_59288336--228166228166223166219166220166212166228166
ham15-low17236161--23097224972149720897212972139723097
hwb67116105--987510275101759875987598759775
mod5_4528282816258238218218218218238
mod_mult_559484940354035403540354035403540354035
mod_red_211110511977738773867383738373837383738573
qcla_adder_1036233238183162200162189162182162180162174162175162200162
qcla_com_7241862031329513695134951339513395131951319513695
qcla_mod_726382413292235312237310237296237293237293237292237312237
qft_454669--4567446744674467446744674567
rc_adder_614937771477147714771477147714771477147
tof_101910211970717871787178717871787178717871
tof_35182114151515151515151515151515151515
tof_47303522232423242324232423242324232423
tof_59424930313331333133313331333133313331
vbe_adder_310707050244624442439244024362436244624
\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "df = b.df(groups=['fast'], routines=['all'], funcs=['all'], atts=['Qubits','2Q Count','T Count'])" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + 
" \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
 Originalflow-opt-c0flow-opt-c1flow-opt-c2flow-opt-c3flow-opt-c4flow-opt-c5flow-opt-g0
 Qubitst_optt_optt_optt_optt_optt_optt_opt
Circuits        
Adder8230.440.651.495.1613.0530.530.28
QFT880.020.030.161.5217.2470.570.02
QFTAdd8160.140.180.794.9718.8447.900.12
adder_8240.571.034.2020.0772.87207.770.46
barenco_tof_10190.200.290.622.147.5921.570.18
barenco_tof_350.010.010.020.030.050.080.01
barenco_tof_470.020.030.060.180.651.920.02
barenco_tof_590.040.060.130.481.744.960.03
csla_mux_3_original150.030.040.050.100.290.730.03
csum_mux_9_corrected300.160.220.410.600.800.950.15
gf2^4_mult120.050.070.130.512.277.930.05
gf2^5_mult150.090.120.291.8210.3247.290.08
gf2^6_mult180.160.220.756.1742.76227.310.14
gf2^7_mult210.270.361.3613.59106.82686.690.23
gf2^8_mult240.470.622.7933.13309.612,343.700.40
grover_590.430.561.333.175.988.150.30
ham15-low170.200.463.2817.7854.04579.600.17
hwb670.060.130.300.922.043.900.09
mod5_450.010.020.030.050.080.160.01
mod_mult_5590.020.020.040.060.140.160.02
mod_red_21110.070.120.310.812.666.460.06
qcla_adder_10360.160.240.542.058.6011.860.17
qcla_com_7240.150.230.822.898.0117.260.13
qcla_mod_7260.460.703.2323.73223.861,463.860.36
qft_450.020.020.040.090.070.110.02
rc_adder_6140.040.060.110.260.390.470.04
tof_10190.060.080.170.461.001.400.07
tof_350.010.010.010.020.020.030.01
tof_470.010.010.020.080.080.110.02
tof_590.020.020.050.130.270.400.02
vbe_adder_3100.030.060.200.430.630.820.03
\n" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "df = b.df(groups=['fast'], routines=['all'], funcs=['all'], atts=['Qubits','t_opt'])" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "b.save('flow_opt_results')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "pyzx", + "language": "python", + "name": "pyzx" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/demos/Circuit Optimisation/flow_opt_results/circuit_groups.pkl b/demos/Circuit Optimisation/flow_opt_results/circuit_groups.pkl new file mode 100644 index 00000000..92aa2a5c Binary files /dev/null and b/demos/Circuit Optimisation/flow_opt_results/circuit_groups.pkl differ diff --git a/demos/Circuit Optimisation/flow_opt_results/circuits.pkl b/demos/Circuit Optimisation/flow_opt_results/circuits.pkl new file mode 100644 index 00000000..225c9f6c Binary files /dev/null and b/demos/Circuit Optimisation/flow_opt_results/circuits.pkl differ diff --git a/demos/Circuit Optimisation/flow_opt_results/funcs.pkl b/demos/Circuit Optimisation/flow_opt_results/funcs.pkl new file mode 100644 index 00000000..10607bca Binary files /dev/null and b/demos/Circuit Optimisation/flow_opt_results/funcs.pkl differ diff --git a/demos/Circuit Optimisation/flow_opt_results/rand_data.pkl b/demos/Circuit Optimisation/flow_opt_results/rand_data.pkl new file mode 100644 index 00000000..e2ecf720 --- /dev/null +++ b/demos/Circuit Optimisation/flow_opt_results/rand_data.pkl @@ -0,0 +1 @@ +€}”. \ No newline at end of file diff --git a/demos/Circuit Optimisation/flow_opt_results/routines.pkl b/demos/Circuit Optimisation/flow_opt_results/routines.pkl new file mode 100644 index 00000000..ff14d8e1 Binary files /dev/null and b/demos/Circuit Optimisation/flow_opt_results/routines.pkl differ diff --git a/demos/Circuit Optimisation/qft-opt.ipynb b/demos/Circuit Optimisation/qft-opt.ipynb new file mode 100644 index 00000000..5026d18f --- /dev/null +++ b/demos/Circuit Optimisation/qft-opt.ipynb @@ -0,0 +1,828 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import sys, os, time\n", + "sys.path.append('../..')\n", + "import pyzx as zx\n", + "import numpy as np" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This notebook demonstrates a particularly effective method for optimising QFT circuits, as noted in [https://arxiv.org/abs/2312.02793](https://arxiv.org/abs/2312.02793)." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2Q-count = 56\n", + "Non-Clifford gate count = 84\n" + ] + } + ], + "source": [ + "c_i = zx.Circuit.load(\"../../circuits/benchmarking_circuits/Fast/before/QFT8\")\n", + "zx.draw(c_i)\n", + "print(f'2Q-count = {c_i.twoqubitcount()}')\n", + "print(f'Non-Clifford gate count = {c_i.tcount()}')" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2Q-count = 42\n", + "Non-Clifford gate count = 42\n" + ] + } + ], + "source": [ + "g = c_i.to_graph()\n", + "zx.to_graph_like(g)\n", + "zx.teleport_reduce(g)\n", + "zx.id_fuse_simp(g, quiet=True)\n", + "c_f = zx.extract_circuit(g)\n", + "c_f = zx.basic_optimization(c_f)\n", + "zx.draw(c_f)\n", + "print(f'2Q-count = {c_f.twoqubitcount()}')\n", + "print(f'Non-Clifford gate count = {c_f.tcount()}')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This results in exactly one non-Clifford gate per two-qubit gate.\n", + "\n", + "These two circuits are equal as evidenced by equal matrices. Note that the way the matrices are calculated introduces some rounding errors (e.g. see the 0,0 element below)." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(3.191891195797325e-16+0j)" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "c_i.to_matrix()[0,0] - c_f.to_matrix()[0,0]" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "np.allclose(c_i.to_matrix(), c_f.to_matrix(), atol=1e-10)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "pyzx", + "language": "python", + "name": "pyzx" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.3" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/demos/LocalSearch.ipynb b/demos/LocalSearch.ipynb index efc93086..e88d40d2 100644 --- a/demos/LocalSearch.ipynb +++ b/demos/LocalSearch.ipynb @@ -1,5 +1,15 @@ { "cells": [ + { + "cell_type": "markdown", + "id": "86c623f0", + "metadata": {}, + "source": [ + "\n", + "WARNING: This notebook is currently broken.\n", + "" + ] + }, { "cell_type": "code", "execution_count": 1, @@ -76,7 +86,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -206,9 +216,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "pyzx", "language": "python", - "name": "python3" + "name": "pyzx" }, "language_info": { "codemirror_mode": { @@ -220,7 +230,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.11.3" } }, "nbformat": 4, diff --git a/demos/Time Benchmark.ipynb b/demos/Time Benchmark.ipynb index 6f278dc3..53685aed 100644 --- a/demos/Time Benchmark.ipynb +++ b/demos/Time Benchmark.ipynb @@ -16,7 +16,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -31,7 +31,7 @@ "x,y = [],[]\n", "for i in range(1,101):\n", " gate_count = 100+i*23; x.append(gate_count)\n", - " g = zx.generate.CNOT_HAD_PHASE_circuit(qubits=20, gates=gate_count, p_had=0.1, p_t=0.15,clifford=True).to_graph()\n", + " g = zx.generate.CNOT_HAD_PHASE_circuit(qubits=20, depth=gate_count, p_had=0.1, p_t=0.15,clifford=True).to_graph()\n", " t = time.time(); zx.simplify.clifford_simp(g,quiet=True)\n", " 
y.append(time.time()-t)\n", " if i%10 == 0: print(i,end='. ')" @@ -39,7 +39,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -1277,9 +1277,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "pyzx", "language": "python", - "name": "python3" + "name": "pyzx" }, "language_info": { "codemirror_mode": { @@ -1291,7 +1291,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.11.3" } }, "nbformat": 4, diff --git a/demos/ZXW_demo.ipynb b/demos/ZXW_demo.ipynb index 3812a573..053ae0aa 100644 --- a/demos/ZXW_demo.ipynb +++ b/demos/ZXW_demo.ipynb @@ -21,12 +21,14 @@ "metadata": {}, "outputs": [], "source": [ - "import pyzx as zx" + "import sys; sys.path.append('..')\n", + "import pyzx as zx\n", + "from fractions import Fraction" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -381,7 +383,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -1091,7 +1093,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -1446,7 +1448,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -2140,7 +2142,7 @@ ], "source": [ "# the z spider is a special case of the z box and it can be converted using the z to zbox method\n", - "z_spider = zx.generate.spider(\"Z\", 4, 5, 1/2)\n", + "z_spider = zx.generate.spider(\"Z\", 4, 5, Fraction(1,2))\n", "zx.draw(z_spider)\n", "zx.rules.z_to_z_box(z_spider, zx.rules.match_z_to_z_box(z_spider))\n", "zx.draw(z_spider)" @@ -2148,7 +2150,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -2855,20 +2857,13 @@ "zx.spider_simp(zbox_diag)\n", "zx.draw(zbox_diag)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { "kernelspec": { - "display_name": ".venv", + "display_name": "pyzx", "language": "python", - "name": "python3" + "name": "pyzx" }, "language_info": { "codemirror_mode": { @@ -2880,7 +2875,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.11.3" }, "orig_nbformat": 4 }, diff --git a/demos/hyperpivot.ipynb b/demos/hyperpivot.ipynb index e180ec5a..1e95649f 100644 --- a/demos/hyperpivot.ipynb +++ b/demos/hyperpivot.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 8, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -15,7 +15,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -253,7 +253,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -1349,7 +1349,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -3146,7 +3146,7 @@ "source": [ "h = g.copy()\n", "zx.simplify.spider_simp(h, quiet=True)\n", - "zx.hsimplify.to_hbox(h)\n", + "zx.hsimplify.to_hypergraph_form(h)\n", "m = zx.hrules.match_hpivot(h)\n", "print(m)\n", "display(zx.draw(h,labels=True))\n", @@ -3156,7 +3156,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -3863,7 +3863,7 @@ }, { "cell_type": "code", - "execution_count": 6, + 
"execution_count": 7, "metadata": {}, "outputs": [ { @@ -3885,60 +3885,51 @@ "cell_type": "code", "execution_count": 9, "metadata": {}, - "outputs": [], - "source": [ - "dir_fast_circuits = os.path.join('..', 'circuits', 'Fast')" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Circuit qubits gates Z H reduced\n", - "tof_3_before 5 9 10 2 True\n", - "qcla_adder_10_before 36 113 72 85 True\n", - "tof_10_before 19 51 38 9 True\n", - "gf2^4_mult_before 12 51 24 20 True\n", - "barenco_tof_4_before 7 18 14 1 True\n", - "qcla_com_7_before 24 95 48 4117 True\n", - "barenco_tof_10_before 19 66 38 1 True\n", - "gf2^7_mult_before 21 153 42 67 True\n", - "gf2^9_mult_before 27 251 54 101 True\n", - "barenco_tof_3_before 5 10 10 1 True\n", - "mod5_4_before 5 15 10 4 True\n", - "QFT16_before 16 586 32 114 True\n", - "vbe_adder_3_before 10 30 20 15 True\n", - "gf2^5_mult_before 15 79 30 31 True\n", - "csum_mux_9_corrected_before 30 56 60 28 True\n", - "adder_8_before 24 216 48 101 True\n", - "QFT8_before 8 148 16 28 True\n", - "mod_red_21_before 11 74 22 51 True\n", - "gf2^8_mult_before 24 213 48 113 True\n", - "rc_adder_6_before 14 68 28 415 True\n", - "gf2^10_mult_before 30 309 60 127 True\n", - "tof_5_before 9 21 18 4 True\n", - "Adder8_before 23 105 46 44 True\n", - "mod_mult_55_before 9 35 18 13 True\n", - "tof_4_before 7 15 14 3 True\n", - "csla_mux_3_original_before 15 50 30 35 True\n", - "qcla_mod_7_before 26 176 52 2057 True\n", - "barenco_tof_5_before 9 26 18 1 True\n", - "gf2^6_mult_before 18 113 36 47 True\n" + "tof_3 5 9 10 2 True\n", + "tof_4 7 15 14 3 True\n", + "csum_mux_9_corrected 30 56 60 28 True\n", + "rc_adder_6 14 68 28 411 True\n", + "tof_5 9 21 18 4 True\n", + "hwb6.qc 7 79 19 144 False\n", + "mod5_4 5 15 10 4 True\n", + "Adder8 23 105 46 44 True\n", + "qcla_adder_10 36 113 72 85 True\n", + "mod_mult_55 9 35 18 13 True\n", + "qcla_com_7 24 95 48 4117 True\n", + "QFT8 8 148 16 28 True\n", + "gf2^6_mult 18 113 36 47 True\n", + "barenco_tof_4 7 18 14 1 True\n", + "barenco_tof_3 5 10 10 3 True\n", + "barenco_tof_10 19 66 38 9 True\n", + "barenco_tof_5 9 26 18 1 True\n", + "grover_5.qc 9 255 18 20 True\n", + "gf2^7_mult 21 153 42 67 True\n", + "csla_mux_3_original 15 50 30 35 True\n", + "gf2^4_mult 12 51 24 20 True\n", + "qcla_mod_7 26 176 52 2057 True\n", + "gf2^8_mult 24 213 48 113 True\n", + "adder_8 24 216 48 101 True\n", + "gf2^5_mult 15 79 30 31 True\n", + "vbe_adder_3 10 30 20 15 True\n", + "ham15-low.qc 17 167 34 192 True\n", + "mod_red_21 11 74 22 51 True\n", + "tof_10 19 51 38 9 True\n", + "qft_4.qc 5 155 37 25 False\n" ] } ], "source": [ - "d = os.path.join('..', 'circuits', 'Fast')\n", + "d = os.path.join('..', 'circuits', 'benchmarking_circuits', 'Fast', 'before')\n", "print('Circuit'.ljust(30) + ' qubits' + ' gates' + ' Z' + ' H' + ' reduced')\n", "for f in os.listdir(d):\n", " f1 = os.path.join(d,f)\n", " if f.find('QFTAdd8') != -1: continue # takes too long\n", - " if not os.path.isfile(f1) or f.find('before') == -1: continue\n", " print(f.ljust(30), end='')\n", " \n", " c = zx.Circuit.load(f1)\n", @@ -3958,20 +3949,13 @@ " \n", " print(g.qubit_count() * 2 == z)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "pyzx", "language": "python", - "name": "python3" + "name": "pyzx" }, "language_info": { "codemirror_mode": { @@ -3983,7 
+3967,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.4" + "version": "3.11.3" } }, "nbformat": 4, diff --git a/doc/api.rst b/doc/api.rst index 2139c27e..2dcbd93e 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -1,6 +1,5 @@ Full API documentation ====================== - Below is listed the documentation for all the supported functions, classes and methods in PyZX. Some functionality of PyZX is still experimental or not well-tested (like the ZH-diagram interface and rewrite rules), so it is not listed here. @@ -21,7 +20,6 @@ Below you can find full documentation of all the functions supplied by a Graph i .. autoclass:: pyzx.graph.base.BaseGraph :members: - Circuit API ----------- @@ -32,20 +30,16 @@ Circuit API .. autoclass:: pyzx.circuit.Circuit :members: - .. _generate: Generating Circuits ------------------- - - The following are some methods to generate (random) quantum circuits. .. automodule:: pyzx.generate :members: :undoc-members: - .. _extract: Circuit extraction and matrices over Z2 @@ -53,37 +47,105 @@ Circuit extraction and matrices over Z2 .. module:: extract -There is basically a single function that is needed for the most general extraction of a circuit from a ZX-diagram: +There is a single function that performs the most general extraction of a circuit from a ZX-diagram: .. autofunction:: pyzx.extract.extract_circuit -This function uses some reasoning over matrices over the field Z2. This functionality is implemented in the following class. +For graphs which admit a causal flow there is a simpler function for circuit extraction: + +.. autofunction:: pyzx.extract.extract_simple + +The function :func:`~pyzx.extract.extract_circuit` uses some reasoning over matrices over the field Z2. This functionality is implemented in the following class. .. autoclass:: pyzx.linalg.Mat2 :members: +The function :func:`~pyzx.extract.extract_simple` uses a phase polynomial synthesis algorithm based on https://arxiv.org/abs/2004.06052 to extract phase gadgets fufilling causal flow conditons. This is implemented in the following function. + +.. autofunction:: pyzx.extract.phase_poly_synth .. _simplify: List of simplifications ----------------------- - Below is listed the content of ``simplify.py``. .. module:: simplify .. automodule:: pyzx.simplify - :members: - :undoc-members: + + The following functions iteratively apply a single rewrite rule frum rules_ using the helper function :func:`~pyzx.simplify.simp` .. autofunction:: simp + .. autofunction:: id_simp + + .. autofunction:: spider_simp + + .. autofunction:: id_fuse_simp + + .. autofunction:: pivot_simp + + .. autofunction:: pivot_boundary_simp + + .. autofunction:: pivot_gadget_simp + + .. autofunction:: lcomp_simp + + .. autofunction:: biagl_simp + + .. autofunction:: gadget_simp + + .. autofunction:: supplementarity_simp + + .. autofunction:: copy_simp + + The following functions iteratively apply a combination of the above functions: + + .. autofunction:: basic_simp + + .. autofunction:: phase_free_simp + + .. autofunction:: interior_clifford_simp + + .. autofunction:: clifford_simp + + .. autofunction:: full_reduce + + .. autofunction:: reduce_scalar + + The following function implements phase teleportation of non-Clifford phases: + + .. autofunction:: teleport_reduce + + The following implements a more selective simplification strategy using the helper function :func:`~pyzx.simplify.selective_simp` + + .. autofunction:: selective_simp + + .. autofunction:: flow_2Q_simp + + .. 
autofunction:: match_score_2Q_simp + + .. autofunction:: update_2Q_simp_matches + + The following functions perform various useful actions on ZX-diagrams: + + .. autofunction:: to_gh + + .. autofunction:: to_rg + + .. autofunction:: to_graph_like + + .. autofunction:: is_graph_like + + .. autofunction:: to_clifford_normal_form_graph + + .. autofunction:: tcount .. _rules: List of rewrite rules --------------------- - Below is listed the content of ``rules.py``. .. module:: rules @@ -92,13 +154,33 @@ Below is listed the content of ``rules.py``. :members: :undoc-members: +.. _heuristics: + +List of heuristic functions +--------------------------- +Below is listed the content of ``heuristics.py``. +.. module:: heuristics + +.. automodule:: pyzx.heuristics + :members: + :undoc-members: + +.. _flow: + +List of flow functions +---------------------- +Below is listed the content of ``flow.py``. + +.. module:: flow + +.. automodule:: pyzx.flow + :members: .. _optimize: List of optimization functions ------------------------------ - Below is listed the content of ``optimize.py``. .. module:: optimize @@ -107,12 +189,10 @@ Below is listed the content of ``optimize.py``. :members: :undoc-members: - .. _routing: List of routing functions ------------------------------ - Below is listed the content of ``routing.py``. .. module:: routing @@ -126,7 +206,6 @@ Below is listed the content of ``routing.py``. Functions for dealing with tensors ---------------------------------- - Below is listed the content of ``tensor.py``. .. module:: tensor @@ -140,7 +219,6 @@ Below is listed the content of ``tensor.py``. Drawing ------- - Below is listed the content of ``drawing.py``. .. module:: drawing @@ -152,7 +230,6 @@ Below is listed the content of ``drawing.py``. Tikz and Quantomatic functionality ---------------------------------- - .. _tikz: Below is listed the content of ``tikz.py``. diff --git a/pyzx/__init__.py b/pyzx/__init__.py index a204d297..91a98b52 100644 --- a/pyzx/__init__.py +++ b/pyzx/__init__.py @@ -40,6 +40,8 @@ from . import hrules from . import optimize from . import simplify +from . import heuristics +from . import flow from . import hsimplify from . import d3 from . import tikz diff --git a/pyzx/extract.py b/pyzx/extract.py index cecdb3fc..a038b8ce 100644 --- a/pyzx/extract.py +++ b/pyzx/extract.py @@ -15,12 +15,12 @@ # limitations under the License. __all__ = ['extract_circuit', 'extract_simple', 'graph_to_swaps', 'extract_clifford_normal_form', - 'lookahead_extract_base', 'lookahead_full', 'lookahead_fast', 'lookahead_extract'] + 'lookahead_extract_base', 'lookahead_full', 'lookahead_fast', 'lookahead_extract', 'phase_poly_synth'] from fractions import Fraction import itertools -from .utils import EdgeType, VertexType, toggle_edge +from .utils import EdgeType, VertexType, toggle_edge, FractionLike from .linalg import Mat2, Z2 from .simplify import id_simp, tcount,full_reduce from .rules import apply_rule, pivot, match_spider_parallel, spider @@ -30,6 +30,7 @@ from .graph.base import BaseGraph, VT, ET from typing import List, Optional, Tuple, Dict, Set, Union, Iterator +from typing_extensions import Literal def bi_adj(g: BaseGraph[VT,ET], vs:List[VT], ws:List[VT]) -> Mat2: @@ -710,75 +711,160 @@ def extract_circuit( return graph_to_swaps(g, up_to_perm) + c -def extract_simple(g: BaseGraph[VT, ET], up_to_perm: bool = True) -> Circuit: - """A simplified circuit extractor that works on graphs with a causal flow (e.g. graphs arising - from circuits via spider fusion). 
+def extract_simple(g: BaseGraph[VT, ET], up_to_perm: bool = False, synth_phase_polys: bool = False) -> Circuit: + """A simplified circuit extractor that only works on graphs with causal flow. + Also works with an extension of causal flow that allows phase gadgets under certain conditions. + Phase gadgets are extracted using :func:`phase_poly_synth` - Args: - g: The graph to extract - up_to_perm: If true, returns a circuit that is equivalent to the given graph up to a permutation of the inputs. + :param g: The ZX-diagram to be extracted as a circuit + :param up_to_perm: If True, returns a circuit that is equivalent to the given graph up to a permutation of the inputs, defaults to False + :param synth_phase_polys: If True, synthesises any extractable phase gadgets as phase polynmolials + :return: """ - - progress = True + n_qubits = g.qubit_count() + circ = Circuit(n_qubits) outputs = g.outputs() - circ = Circuit(len(outputs)) - while progress: - progress = False + inputs = g.inputs() + phases = g.phases() + + while True: + progress_made = False + # Extracting output nodes for q, o in enumerate(outputs): - if g.vertex_degree(o) != 1: + on = list(g.neighbors(o)) + if len(on) != 1: raise ValueError("Bad output degree") - v = list(g.neighbors(o))[0] + + v = on[0] e = g.edge(o, v) if g.edge_type(e) == EdgeType.HADAMARD: - progress = True circ.prepend_gate(HAD(q)) g.set_edge_type(e, EdgeType.SIMPLE) - elif (g.type(v) == VertexType.Z or g.type(v) == VertexType.X) and g.vertex_degree(v) == 2: - ns = list(g.neighbors(v)) - w = ns[0] if ns[1] == o else ns[1] - progress = True - - if g.phase(v) != 0: - gate = (ZPhase(q, g.phase(v)) if g.type(v) == VertexType.Z else - XPhase(q, g.phase(v))) - circ.prepend_gate(gate) - - g.add_edge(g.edge(w,o), edgetype=g.edge_type(g.edge(w,v))) + progress_made = True + elif g.type(v) in [VertexType.Z, VertexType.X] and g.vertex_degree(v) == 2: + vn = list(g.neighbors(v)) + w = vn[0] if vn[1] == o else vn[1] + if g.phase(v) != 0: circ.prepend_gate(ZPhase(q, g.phase(v)) if g.type(v) == VertexType.Z else XPhase(q, g.phase(v))) + g.add_edge(g.edge(w, o), edgetype=g.edge_type(g.edge(w, v))) g.remove_vertex(v) + progress_made = True + + if progress_made: continue + + # Extracting pairs of output nodes + for q1, o1 in enumerate(outputs): + for q2, o2 in [(q, o) for q, o in enumerate(outputs) if q > q1]: + v1, v2 = list(g.neighbors(o1))[0], list(g.neighbors(o2))[0] + + if not g.connected(v1, v2): continue + + gate_map = { + (VertexType.Z, VertexType.Z, EdgeType.HADAMARD): CZ, + (VertexType.X, VertexType.X, EdgeType.HADAMARD): XCX, + (VertexType.Z, VertexType.X, EdgeType.SIMPLE): CNOT + } + + gate = gate_map.get((g.type(v1), g.type(v2), g.edge_type(g.edge(v1, v2)))) - if progress: continue + if gate: + circ.prepend_gate(gate(control=q1, target=q2)) + g.remove_edge(g.edge(v1, v2)) + progress_made = True + else: raise ValueError("ZX-diagram is not unitary") + + if progress_made: continue + if not synth_phase_polys: break + + # Extracting phase gadgets + front = {list(g.neighbors(o))[0]: q for q, o in enumerate(outputs)} + gadgets, zphases = [], [] + parity_matrix_T: List[List[Literal[0,1]]] = [] + + for v in g.vertices(): + if v in outputs or v in inputs or g.vertex_degree(v) != 1: continue + + n = list(g.neighbors(v))[0] + if not (g.type(v) == VertexType.Z and g.type(n) == VertexType.Z): continue + if phases[n] not in (0,1): continue + if n in gadgets: continue + if n in outputs or n in inputs: continue + + connected_vertices = set(g.neighbors(n)).difference({v}) + if not 
connected_vertices.issubset(front): continue + + gadgets.extend([n, v]) + zphases.append(-phases[v] if phases[n] == 1 else phases[v]) + connected_qubits = [front[vertex] for vertex in connected_vertices] + parity_matrix_T.append([1 if q in connected_qubits else 0 for q in range(n_qubits)]) + progress_made = True - for q1,o1 in enumerate(outputs): - for q2,o2 in enumerate(outputs): - if o1 == o2: continue - v1 = list(g.neighbors(o1))[0] - v2 = list(g.neighbors(o2))[0] - if g.connected(v1,v2): - if ((g.type(v1) == g.type(v2) and g.edge_type(g.edge(v1,v2)) == EdgeType.SIMPLE) or - (g.type(v1) != g.type(v2) and g.edge_type(g.edge(v1,v2)) == EdgeType.HADAMARD)): - raise ValueError("ZX diagram is not unitary") - - if g.type(v1) == VertexType.Z and g.type(v2) == VertexType.X: - # CNOT - progress = True - circ.prepend_gate(CNOT(control=q1,target=q2)) - g.remove_edge(g.edge(v1,v2)) - elif g.type(v1) == VertexType.Z and g.type(v2) == VertexType.Z: - # CZ - progress = True - circ.prepend_gate(CZ(control=q1,target=q2)) - g.remove_edge(g.edge(v1,v2)) - elif g.type(v1) == VertexType.X and g.type(v2) == VertexType.X: - # conjugate CZ - progress = True - circ.prepend_gate(XCX(control=q1, target=q2)) - g.remove_edge(g.edge(v1,v2)) + if gadgets: + phase_poly_circ = phase_poly_synth(n_qubits, parity_matrix_T, zphases) + g.remove_vertices(gadgets) + circ = phase_poly_circ + circ + if not progress_made: break return graph_to_swaps(g, up_to_perm) + circ +def phase_poly_synth(n_qubits: int, parity_matrix_T: List[List[Literal[0, 1]]], zphases: List[FractionLike]) -> Circuit: + """Converts a series of phase polynomials into a circuit, utilising Gray codes. + Based on pseudocode in https://arxiv.org/abs/2004.06052. + + :param n_qubits: Number of qubits + :param parity_matrix_T: Transpose of parity matrix describing support of phase polynomials + :param zphases: List of phases belonging to each phase polynomial + :return: + """ + circ = Circuit(n_qubits) + undo_circ = Circuit(n_qubits) + parity_matrix = [list(e) for e in zip(*parity_matrix_T)] + + def reduce_columns(columns: List[int]) -> List[int]: + reduced_cols = [] + for col in columns: + if len([row for row in parity_matrix if row[col]==1])==1: + qubit = max(range(len(parity_matrix)), key=lambda q: parity_matrix[q][col]) + circ.add_gate(ZPhase(qubit, zphases[col])) + else: reduced_cols.append(col) + return reduced_cols + + def base_recursion_step(cols: List[int], rows: List[int]) -> None: + if not cols or not rows: return + chosen_row = max(rows, key=lambda row: max([len([col for col in cols if parity_matrix[row][col]==0]),len([col for col in cols if parity_matrix[row][col]==1])])) + cols0 = [col for col in cols if parity_matrix[chosen_row][col]==0] + cols1 = [col for col in cols if parity_matrix[chosen_row][col]==1] + base_recursion_step(cols0, [row for row in rows if row != chosen_row]) + ones_recursion_step(cols1, rows, chosen_row) + + def ones_recursion_step(cols: List[int], rows: List[int], chosen_row: int) -> None: + if not cols: return + if len(rows)==1 and chosen_row in rows: return + other_rows = [row for row in rows if row != chosen_row] + n = max(other_rows, key=lambda row: len([col for col in cols if parity_matrix[row][col]==1])) + if len([col for col in cols if parity_matrix[n][col]==1]) > 0: + place_CNOT(chosen_row, n) + cols = reduce_columns(cols) + else: + place_CNOT(n, chosen_row) + place_CNOT(chosen_row, n) + cols0 = [col for col in cols if parity_matrix[chosen_row][col]==0] + cols1 = [col for col in cols if parity_matrix[chosen_row][col]==1] 
+ base_recursion_step(cols0, other_rows) + ones_recursion_step(cols1, rows, chosen_row) + + def place_CNOT(control: int, target: int) -> None: + circ.add_gate(CNOT(control, target)) + undo_circ.prepend_gate(CNOT(control, target)) + parity_matrix[control] = [sum(x)%2 for x in zip(parity_matrix[control], parity_matrix[target])] + + columns = reduce_columns(list(range(len(parity_matrix[0])))) + base_recursion_step(columns, list(range(n_qubits))) + return circ + undo_circ + + def graph_to_swaps(g: BaseGraph[VT, ET], no_swaps: bool = False) -> Circuit: """Converts a graph containing only normal and Hadamard edges (i.e., no vertices other than inputs and outputs) into a circuit of Hadamard and SWAP gates. If 'no_swaps' is True, only add diff --git a/pyzx/flow.py b/pyzx/flow.py new file mode 100644 index 00000000..ddedb3f2 --- /dev/null +++ b/pyzx/flow.py @@ -0,0 +1,295 @@ +# PyZX - Python library for quantum circuit rewriting +# and optimization using the ZX-calculus +# Copyright (C) 2018 - Aleks Kissinger and John van de Wetering + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains functions to calculate flows of graph-like ZX-diagrams.""" + +from typing import Dict, Set, Tuple, Optional, List, FrozenSet, Sequence, Union +from math import comb + +from .linalg import Mat2 +from .graph.base import BaseGraph, VertexType, VT, ET + +def gflow(g: BaseGraph[VT, ET]) -> Optional[Tuple[Dict[VT, int], Dict[VT, Set[VT]], int]]: + """Computes the maximally delayed gflow of a diagram in graph-like form. + Based on an algorithm by Perdrix and Mhalla. + See dx.doi.org/10.1007/978-3-540-70575-8_70 + + :param g: A graph-like ZX-diagram. + :return: Returns None if a gflow does not exist. + Otherwise returns A 3-tuple containing an order labelling, the successor function of the gflow, and the maximum depth reached. 
+ """ + order: Dict[VT, int] = {} + gflow: Dict[VT, Set[VT]] = {} + + inputs = set(g.inputs()) + processed = set(g.outputs()) | g.grounds() + vertices = set(g.vertices()) + pattern_inputs = {inp if g.type(inp) != VertexType.BOUNDARY else n for inp in inputs for n in g.neighbors(inp)} + + depth = 1 + order.update({v: 0 for v in processed}) + + while True: + neighbors_not_processed = {v: {w for w in g.neighbors(v) if w not in processed} for v in processed} + processed_prime = list(set(v for v, neighbors in neighbors_not_processed.items() if neighbors).difference(pattern_inputs)) + candidates = [v for v in vertices - processed if any(w in processed_prime for w in g.neighbors(v))] + + zerovec = Mat2([[0] for _ in candidates]) + m = Mat2([[1 if g.connected(v,w) else 0 for v in processed_prime] for w in candidates]) + + for idx, u in enumerate(candidates): + vu = zerovec.copy() + vu.data[idx] = [1] + x = m.solve(vu) + if x: + gflow[u] = {processed_prime[i] for i in range(x.rows()) if x.data[i][0]} + order[u] = depth + + if not order.keys() - processed: + if vertices.difference(processed) == inputs.difference(pattern_inputs): + return order, gflow, depth + else: + processed.update(order.keys()) + depth += 1 + +def cflow(g: BaseGraph[VT, ET], full_path_info: bool = False) -> Union[Optional[Tuple[Dict[VT, int], Dict[VT,VT], int]],Optional[Tuple[Dict[VT,VT], Dict[VT,VT], Dict[VT,int], Dict[Tuple[VT,VT], int]]]]: + """Computes the causal flow of a diagram in graph-like form. + If ``full_path_info`` is False (the default) the flow is calculated based on an algorithm by + Perdrix and Mhalla (see dx.doi.org/10.1007/978-3-540-70575-8_70) in O(kn) for n=|V| and k=|I|=|O|. This will output an order + labelling, a successor function and the maximum depth reached. + + If ``full_path_info`` is set to True, then the flow is calculated based + on an extended version of the algorithm by Niel de Beaudrap (see https://doi.org/10.48550/arXiv.quant-ph/0603072) in O(k^2n). + This will output an order labelling, a successor function, a path labelling and a supremum function. This characterises the full + chain decomposition of the Dipaths. + + If the diagram has phase gadgets ``full_path_info`` is required to be True in order for causal flow to be calculated and checked. + + :param g: A graph-like ZX-diagram. + :param full_path_info: Whether to calculate the full chain decomposition of the flow. + :return: Returns None if a causal flow does not exist. + If ``full_path_info`` is False returns a 3-tuple containing an order labelling, the successor function of the flow, and the maximum depth reached. + If ``full_path_info`` is True returns a 4-tuple containing an order labelling, a successor function, a path labelling and a supremum function. 
+ """ + if full_path_info: + return full_cflow(g) + + inputs = set(g.inputs()) + processed = set(g.outputs()) + vertices = set(g.vertices()) + num_vertices = len(vertices) + non_inputs = vertices - inputs + correctors = processed - inputs + + order: Dict[VT, int] = {v:0 for v in processed} + flow: Dict[VT, VT] = {} + + neighbor_sets = {v: set(g.neighbors(v)) for v in vertices} + + depth = 1 + while True: + out_prime = set() + c_prime = set() + + for v in correctors: + ns = neighbor_sets[v] - processed + if len(ns) == 1: + u = ns.pop() + if v != u: + flow[u] = v + order[v] = depth + out_prime.add(u) + c_prime.add(v) + + if not out_prime: + if len(processed) == num_vertices: + return order, flow, depth + return None + + processed.update(out_prime) + correctors.difference_update(c_prime) + correctors.update(out_prime & non_inputs) + depth += 1 + +def full_cflow(g: BaseGraph[VT, ET]) -> Optional[Tuple[Dict[VT,VT], Dict[VT,VT], Dict[VT,int], Dict[Tuple[VT,VT],int]]]: + """Calculates the full chain decomposition for causal flow as per https://doi.org/10.48550/arXiv.quant-ph/0603072. + This has been extended to check for phase gadgets in an extentsion to the definition of causal flow which allows self loops on gadgets.""" + + gadgets = {} + gadget_connections = {} + phases = g.phases() + + inputs = g.inputs() + outputs = g.outputs() + + for v in g.vertices(): + if v in inputs or v in outputs or g.vertex_degree(v) != 1: continue + + n = next(iter(g.neighbors(v))) + + if g.type(v) != VertexType.Z or g.type(n) != VertexType.Z: continue + if phases[n] not in (0,1): continue + if n in gadgets or n in inputs or n in outputs: continue + + gadgets[n] = v + gadget_connections[n] = frozenset(set(g.neighbors(n)) - {v}) + + g_without_gadgets = g.clone() + g_without_gadgets.remove_vertices(set(gadgets.keys()).union(set(gadgets.values()))) + + for v in inputs: + if v in outputs: + g_without_gadgets.remove_vertex(v) + + if not g_without_gadgets.vertices(): return None + + num_inputs = g_without_gadgets.num_inputs() + num_vertices = g_without_gadgets.num_vertices() + if g_without_gadgets.num_edges() > (num_inputs * num_vertices - comb(num_inputs+1, 2)): return None # Prerequisite for causal flow + + path_cover = build_path_cover(g_without_gadgets) + if not path_cover: return None + + successor_function, P, L = get_chain_decomp(g_without_gadgets, path_cover) + sup = compute_suprema(g_without_gadgets, successor_function, P, L) + if not sup: return None + + for n in gadgets.keys(): + connecting = gadget_connections[n] + for m in gadgets.keys(): + connecting_m = gadget_connections[m] + first = None + for v_n in connecting: + for v_m in connecting_m: + if v_n == v_m: continue + if v_n not in P.keys() or v_m not in P.keys(): return None # gadgets are connected + if n == m and P[v_n] == P[v_m]: return None + if v_n in g.inputs() or v_m in g.inputs(): continue + if sup[(P[path_cover.prev(v_m)], v_n)] <= L[path_cover.prev(v_m)]: #v_n < F.prev(v_m) + if first == 'm': return None + first = 'n' + if sup[(P[path_cover.prev(v_n)], v_m)] <= L[path_cover.prev(v_n)]: #v_m < F.prev(v_n) + if first == 'n': return None + first = 'm' + return successor_function, P, L, sup + +class Dipaths: + """Class for handling dipaths, used for calculating causal flow""" + def __init__(self, vertices: Sequence[VT]) -> None: + self.vertices: Dict[VT, bool] = {v: False for v in vertices} + self.arcs: Dict[VT, List[List[VT]]] = {v: [[],[]] for v in vertices} + def prev(self, v): + return next(iter(self.arcs[v][0]), []) + def next(self, v): + return 
next(iter(self.arcs[v][1]), []) + def add_arc(self, v, w): + self.arcs[v][1].append(w) + self.arcs[w][0].append(v) + self.vertices[v] = True + self.vertices[w] = True + def del_arc(self, v, w): + self.arcs[v][1].remove(w) + if not self.arcs[v][0]: self.vertices[v] = False + self.arcs[w][0].remove(v) + if not self.arcs[w][1]: self.vertices[w] = False + +def build_path_cover(g: BaseGraph[VT, ET]) -> Optional[Dipaths]: + """Tries to build a path cover for g""" + F = Dipaths(g.vertices()) # Collection of vertex disjoint Dipaths in G + visited = {v: 0 for v in g.vertices()} + i = 0 + for inp in g.inputs(): + i += 1 + F, visited, success = augment_search(g, F, i, visited, inp) + if not success: return None + if len([v for v in g.vertices() if not F.vertices[v]]) == 0: return F + else: return None + +def augment_search(g: BaseGraph[VT, ET], F: Dipaths, iter: int, visited: Dict[VT,int], v: VT) -> Tuple[Dipaths, Dict[VT, int], bool]: + """Searches for an output vertex along pre-alternating walks for F starting at v, subject to limitations on the end-points of the search paths""" + visited[v] = iter + if v in g.outputs(): return(F, visited, True) + if F.vertices[v] and v not in g.inputs() and visited[F.prev(v)] < iter: + F, visited, success = augment_search(g, F, iter, visited, F.prev(v)) + if success: + F.del_arc(F.prev(v),v) + return F, visited, True + for w in g.neighbors(v): + if visited[w] < iter and w not in g.inputs() and F.next(v) != w: + if not F.vertices[w]: + F, visited, success = augment_search(g, F, iter, visited, w) + if success: + F.add_arc(v,w) + return F, visited, True + elif visited[F.prev(w)] < iter: + F, visited, success = augment_search(g, F, iter, visited, F.prev(w)) + if success: + F.del_arc(F.prev(w),w) + F.add_arc(v,w) + return F, visited, True + return F, visited, False + +def get_chain_decomp(g: BaseGraph[VT, ET], C: Dipaths) -> Tuple[Dict[VT,VT], Dict[VT,VT], Dict[VT,int]]: + """Obtain the successor function f of the path cover C, and obtain functions describing the chain decomposition of the influencing digraph""" + P: Dict[VT, VT] = {} + L: Dict[VT, int] = {v:0 for v in g.vertices()} + f: Dict[VT, VT] = {} + for inp in g.inputs(): + l = 0 + v = inp + while v not in g.outputs(): + try: f[v] = C.next(v) + except: raise Exception(f'Vertex: {v}') + P[v] = inp + L[v] = l + if C.next(v)==None: print(v) + v = C.next(v) + l += 1 + P[v] = inp + L[v] = l + return f, P, L + +def compute_suprema(g: BaseGraph[VT, ET], f: Dict[VT,VT], P: Dict[VT,VT], L: Dict[VT,int]) -> Optional[Dict[Tuple[VT, VT], int]]: + """Compute the natural pre-order for successor function f in the form of a supremum function and functions characterising C""" + sup, status = init_status(g,P,L) + for v in [v for v in g.vertices() if v not in g.outputs()]: + if not status[v]: sup, status = traverse_infl_walk(g,f,sup,status,v) + if status[v] == 'pending': return None + return sup + +def init_status(g: BaseGraph[VT, ET], P: Dict[VT,VT], L: Dict[VT,int]) -> Tuple[Dict[Tuple[VT, VT], int],Dict[VT, Optional[Union[bool, str]]]]: + """Initialise the supremum function, and the status of each vertex""" + sup: Dict[Tuple[VT,VT],int] = {} + status: Dict[VT,Optional[Union[bool,str]]] = {v:None for v in g.vertices()} + for v in g.vertices(): + for inp in g.inputs(): + if inp == P[v]: sup[(inp,v)] = L[v] + else: sup[(inp,v)]=g.num_vertices() + if v in g.outputs(): status[v]=True + return sup, status + +def traverse_infl_walk(g: BaseGraph[VT, ET], f: Dict[VT,VT], sup: Dict[Tuple[VT, VT], int], status: Dict[VT, 
Optional[Union[bool, str]]], v: VT) -> Tuple[Dict[Tuple[VT, VT], int], Dict[VT, Optional[Union[bool, str]]]]: + """Compute the suprema of v and all of it's descedants, by traversing influencing walks from v""" + status[v] = 'pending' + for w in list(g.neighbors(f[v]))+[f[v]]: + if w != v: + if not status[w]: sup, status = traverse_infl_walk(g,f,sup,status,w) + if status[w] == 'pending': return sup, status + else: + for inp in g.inputs(): + if sup[(inp,v)] > sup[(inp,w)]: sup[(inp,v)] = sup[(inp,w)] + status[v] = True + return sup, status \ No newline at end of file diff --git a/pyzx/gflow.py b/pyzx/gflow.py deleted file mode 100644 index d8956c54..00000000 --- a/pyzx/gflow.py +++ /dev/null @@ -1,117 +0,0 @@ -# PyZX - Python library for quantum circuit rewriting -# and optimization using the ZX-calculus -# Copyright (C) 2018 - Aleks Kissinger and John van de Wetering - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" - Based on algorithm by Perdrix and Mhalla. Here is the pseudocode from -dx.doi.org/10.1007/978-3-540-70575-8_70 - -``` -input : An open graph -output: A generalised flow - -gFlow (V,Gamma,In,Out) = -begin - for all v in Out do - l(v) := 0 - end - return gFlowaux (V,Gamma,In,Out,1) -end - -gFlowaux (V,Gamma,In,Out,k) = -begin - C := {} - for all u in V \ Out do - Solve in F2 : Gamma[V \ Out, Out \ In] * I[X] = I[{u}] - if there is a solution X0 then - C := C union {u} - g(u) := X0 - l(u) := k - end - end - if C = {} then - return (Out = V,(g,l)) - else - return gFlowaux (V, Gamma, In, Out union C, k + 1) - end -end -``` -""" - -from typing import Dict, Set, Tuple, Optional - -from .extract import bi_adj -from .linalg import Mat2 -from .graph.base import BaseGraph, VertexType, VT, ET - - -def gflow( - g: BaseGraph[VT, ET] -) -> Optional[Tuple[Dict[VT, int], Dict[VT, Set[VT]], int]]: - """Compute the maximally delayed gflow of a diagram in graph-like form. - - Based on algorithm by Perdrix and Mhalla. 
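For reference, the frontier loop at the heart of the new causal-flow routine above can be traced on plain Python data. The following is a standalone sketch only (the 2-qubit line graph and the vertex names are made up for illustration); it is not part of the PyZX API.

```python
# Standalone sketch of the causal-flow frontier loop, on a 2-qubit line graph
# i0 - a - o0,  i1 - b - o1  (names are illustrative).
neighbors = {
    'i0': {'a'}, 'a': {'i0', 'o0'}, 'o0': {'a'},
    'i1': {'b'}, 'b': {'i1', 'o1'}, 'o1': {'b'},
}
inputs, outputs = {'i0', 'i1'}, {'o0', 'o1'}
vertices = set(neighbors)

processed = set(outputs)
correctors = processed - inputs
order = {v: 0 for v in processed}
flow = {}
depth = 1
while True:
    out_prime, c_prime = set(), set()
    for v in correctors:
        ns = neighbors[v] - processed
        if len(ns) == 1:           # v has a unique unprocessed neighbour u
            u = ns.pop()
            flow[u] = v            # u is corrected by v
            order[v] = depth
            out_prime.add(u)
            c_prime.add(v)
    if not out_prime:
        break                      # either every vertex is processed or no causal flow exists
    processed |= out_prime
    correctors = (correctors - c_prime) | (out_prime - inputs)
    depth += 1

assert processed == vertices
assert flow == {'a': 'o0', 'b': 'o1', 'i0': 'a', 'i1': 'b'}
```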
- See dx.doi.org/10.1007/978-3-540-70575-8_70 - """ - l: Dict[VT, int] = {} - gflow: Dict[VT, Set[VT]] = {} - - inputs: Set[VT] = set(g.inputs()) - processed: Set[VT] = set(g.outputs()) | g.grounds() - vertices: Set[VT] = set(g.vertices()) - pattern_inputs: Set[VT] = set() - for inp in inputs: - if g.type(inp) == VertexType.BOUNDARY: - pattern_inputs |= set(g.neighbors(inp)) - else: - pattern_inputs.add(inp) - k: int = 1 - - for v in processed: - l[v] = 0 - - while True: - correct = set() - # unprocessed = list() - processed_prime = [ - v - for v in processed.difference(pattern_inputs) - if any(w not in processed for w in g.neighbors(v)) - ] - candidates = [ - v - for v in vertices.difference(processed) - if any(w in processed_prime for w in g.neighbors(v)) - ] - - zerovec = Mat2([[0] for i in range(len(candidates))]) - # print(unprocessed, processed_prime, zerovec) - m = bi_adj(g, processed_prime, candidates) - for u in candidates: - vu = zerovec.copy() - vu.data[candidates.index(u)] = [1] - x = m.solve(vu) - if x: - correct.add(u) - gflow[u] = {processed_prime[i] for i in range(x.rows()) if x.data[i][0]} - l[u] = k - - if not correct: - if vertices.difference(processed) == inputs.difference(pattern_inputs): - return l, gflow, k - return None - else: - processed.update(correct) - k += 1 diff --git a/pyzx/graph/base.py b/pyzx/graph/base.py index ee80e403..0c333638 100644 --- a/pyzx/graph/base.py +++ b/pyzx/graph/base.py @@ -23,6 +23,7 @@ from typing_extensions import Literal, GenericMeta # type: ignore # https://github.com/python/mypy/issues/5753 import numpy as np +import random from ..utils import EdgeType, VertexType, get_z_box_label, set_z_box_label, toggle_edge, vertex_is_z_like, vertex_is_zx, toggle_vertex, vertex_is_w, get_w_partner, vertex_is_zx_like from ..utils import FloatInt, FractionLike @@ -65,7 +66,7 @@ def pack_indices(lst: List[FloatInt]) -> Mapping[FloatInt,int]: return d VT = TypeVar('VT', bound=int) # The type that is used for representing vertices (e.g. an integer) -ET = TypeVar('ET') # The type used for representing edges (e.g. a pair of integers) +ET = TypeVar('ET', bound=Tuple[int,int]) # The type used for representing edges (e.g. a pair of integers) class BaseGraph(Generic[VT, ET], metaclass=DocstringMeta): """Base class for letting graph backends interact with PyZX. @@ -77,18 +78,21 @@ class BaseGraph(Generic[VT, ET], metaclass=DocstringMeta): def __init__(self) -> None: self.scalar: Scalar = Scalar() - # self.inputs: List[VT] = [] - # self.outputs: List[VT] = [] - #Data necessary for phase tracking for phase teleportation - self.track_phases: bool = False - self.phase_index : Dict[VT,int] = dict() # {vertex:index tracking its phase for phase teleportation} - self.phase_master: Optional['simplify.Simplifier'] = None - self.phase_mult: Dict[int,Literal[1,-1]] = dict() - self.max_phase_index: int = -1 - self._vdata: Dict[VT,Dict[str,Any]] = dict() + + # Tracker for phase teleportation and simplifications + self.phase_tracking: bool = False + self.phase_teleporter: Optional['simplify.PhaseTeleporter'] = None + self.parent_vertex: Dict[VT, VT] = {} + self.vertex_groups: Dict[VT, int] = {} + self.group_data: Dict[int, Set[VT]] = {} + self.phase_sum: Dict[int, FractionLike] = {} + self.phase_mult: Dict[VT, int] = {} + self.vertex_rank: Dict[VT, int] = {} + self.vertices_to_update: List[VT] = [] # merge_vdata(v0,v1) is an optional, custom function for merging # vdata of v1 into v0 during spider fusion etc. 
+ self._vdata: Dict[VT,Dict[str,Any]] = dict() self.merge_vdata: Optional[Callable[[VT,VT], None]] = None self.variable_types: Dict[str,bool] = dict() # mapping of variable names to their type (bool or continuous) @@ -136,13 +140,11 @@ def copy(self, adjoint:bool=False, backend:Optional[str]=None) -> 'BaseGraph': if (backend is None): backend = type(self).backend g = Graph(backend = backend) - g.track_phases = self.track_phases g.scalar = self.scalar.copy() g.merge_vdata = self.merge_vdata mult:int = 1 if adjoint: mult = -1 - #g.add_vertices(self.num_vertices()) ty = self.types() ph = self.phases() qs = self.qubits() @@ -182,14 +184,10 @@ def adjoint(self) -> 'BaseGraph': def clone(self) -> 'BaseGraph': """ - This method should return an identical copy of the graph, without any relabeling - - FIXME: this currently *does* change lables. - - Used in lookahead extraction. + Returns an identical copy of the graph, without any relabeling """ - return self.copy() - + raise NotImplementedError("Not implemented on backend " + type(self).backend) + def map_qubits(self, qubit_map:Mapping[int,Tuple[float,float]]) -> None: for v in self.vertices(): q = self.qubit(v) @@ -671,10 +669,6 @@ def add_vertex(self, self.set_phase(v, phase) if ground: self.set_ground(v, True) - if self.track_phases: - self.max_phase_index += 1 - self.phase_index[v] = self.max_phase_index - self.phase_mult[self.max_phase_index] = 1 return v def add_vertex_indexed(self,v:VT) -> None: @@ -825,38 +819,241 @@ def add_edge_smart(self, e: ET, edgetype: EdgeType.Type): """Like add_edge, but does the right thing if there is an existing edge.""" self.add_edge_table({e : [1,0] if edgetype == EdgeType.SIMPLE else [0,1]}) - def set_phase_master(self, m: 'simplify.Simplifier') -> None: - """Points towards an instance of the class :class:`~pyzx.simplify.Simplifier`. - Used for phase teleportation.""" - self.phase_master = m - - def update_phase_index(self, old:VT, new:VT) -> None: - """When a phase is moved from a vertex to another vertex, - we need to tell the phase_teleportation algorithm that this has happened. - This function does that. Used in some of the rules in `simplify`.""" - if not self.track_phases: return - i = self.phase_index[old] - self.phase_index[old] = self.phase_index[new] - self.phase_index[new] = i - - def fuse_phases(self, p1: VT, p2: VT) -> None: - if p1 not in self.phase_index or p2 not in self.phase_index: + def set_phase_teleporter(self, teleporter: 'simplify.PhaseTeleporter', fusing_mode: bool = True) -> None: + """Used for phase teleportation. + If ``fusing_mode`` is True then phases will be tracked as the graph is simplified. + Otherwise info about previously teleported phases from ``teleporter`` is stored, but not placed on the graph yet. 
+ They will then be placed throughout simplification when required, or through the function :func:`place_tracked_phases` + + :param teleporter: Instance of the class :class:`~pyzx.simplify.PhaseTeleporter` + :param fusing_mode: Defaults to True + """ + self.phase_tracking = True + + if fusing_mode: + self.phase_teleporter = teleporter return - if self.phase_master is not None: - self.phase_master.fuse_phases(self.phase_index[p1],self.phase_index[p2]) - self.phase_index[p2] = self.phase_index[p1] + + for group_num, group in enumerate(teleporter.get_vertex_groups()): + if len(group) == 1: continue + self.group_data[group_num] = set(group) # Groups of vertices fused throughout teleportation + phase_sum: FractionLike = Fraction(0) + for v in group: + self.vertex_rank[v] = teleporter.vertex_rank[v] + self.vertex_groups[v] = group_num + mult = teleporter.phase_mult[v] + self.phase_mult[v] = mult # Associated teleportation phase multiplier + phase_sum += self.phase(v) * mult + self.set_phase(v, 0) # Set all stored phases to zero for now + self.phase_sum[group_num] = phase_sum # Phase sum for each group + + def remove_vertex_from_group(self, v: VT, group: int) -> None: + """Used for post phase teleportation simplifications. + Removes ``v`` from ``group`` then updates group when required. + + :param v: + :param group: + """ + del self.vertex_groups[v] + del self.phase_mult[v] + + group_data = self.group_data[group] + group_data.remove(v) + group_len = len(group_data) + + if group_len == 1: + u = next(iter(group_data)) + phase = self.phase_sum[group] * self.phase_mult[u] + child_u = self.leaf_vertex(u) + + self.add_to_phase(child_u, phase) + + del self.vertex_groups[u] + del self.phase_mult[u] + del self.phase_sum[group] + del self.group_data[group] + self.vertices_to_update.append(child_u) + + if not self.group_data: self.phase_tracking = False # Turn off phase tracking + + elif group_len == 2: + self.vertices_to_update.extend(self.leaf_vertex(u) for u in group_data) # Some pivots may need to be rechecked + + def place_tracked_phases(self, allow_jumping=False) -> None: + """Used for phase teleportation. + Places any stored phases onto the graph. + ``allow_jumping`` defines whether any additional phases are permitted to teleport around during simplification. + """ + for group, vertices in list(self.group_data.items()): + v = max(vertices, key = self.vertex_rank.__getitem__) + phase = self.phase_sum[group] * self.phase_mult[v] + child_v = self.leaf_vertex(v) + if not allow_jumping: + self.add_to_phase(child_v, phase) + continue + current_phase = self.phase(child_v) + self.fix_phase(child_v, current_phase, current_phase + phase) + + if allow_jumping: return + self.vertex_groups.clear() + self.group_data.clear() + self.phase_sum.clear() + self.phase_mult.clear() + self.phase_tracking = False + + def root_vertex(self, v: VT) -> VT: + """Used for phase teleportation. + Returns the root vertex from the original graph. + + :param v: + :return: Either the vertex itself or the vertex from the original graph which it points to. + """ + while v in self.parent_vertex: v = self.parent_vertex[v] + return v + + def leaf_vertex(self, v: VT) -> VT: + """Used for phase teleportation. + Returns the child vertex of a vertex in the current graph. 
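A standalone sketch of the ``parent_vertex`` chain that ``root_vertex`` and ``leaf_vertex`` walk; the integer vertex ids are made up and the two helpers below simply mirror the methods above on a plain dict.

```python
# Each renamed/unfused vertex points at the vertex it came from,
# so 7 -> 4 -> 2 means vertex 2 is the root of vertex 7.
parent_vertex = {7: 4, 4: 2}

def root_vertex(v):
    # Follow the chain upwards until a vertex with no parent is reached.
    while v in parent_vertex:
        v = parent_vertex[v]
    return v

def leaf_vertex(v):
    # Follow the chain downwards: find the child pointing at v, recursively.
    for child, parent in parent_vertex.items():
        if parent == v:
            return leaf_vertex(child)
    return v

assert root_vertex(7) == 2
assert leaf_vertex(2) == 7
assert root_vertex(3) == leaf_vertex(3) == 3   # vertices outside the chain map to themselves
```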
- def phase_negate(self, v: VT) -> None: - if v not in self.phase_index: return - index = self.phase_index[v] - mult = self.phase_mult[index] - if mult == 1: self.phase_mult[index] = -1 - else: self.phase_mult[index] = 1 - #self.phase_mult[index] = -1*mult + :param v: + :return: Either the vertex itself or the vertex in the current graph which points to it. + """ + for child, parent in self.parent_vertex.items(): + if parent == v: return self.leaf_vertex(child) + return v + + def fuse_phases(self, v1: VT, v2: VT) -> None: + """Used for phase teleportation. + Tracks the fusing of vertex ``v2`` into ``v1``. - def vertex_from_phase_index(self, i: int) -> VT: - return list(self.phase_index.keys())[list(self.phase_index.values()).index(i)] + :param v1: Surviving vertex + :param v2: Vertex to be deleted + """ + root_v1 = self.root_vertex(v1) + root_v2 = self.root_vertex(v2) + + if self.phase_teleporter: # Fusing mode + if root_v2 in self.phase_teleporter.non_clifford_vertices: + if root_v1 in self.phase_teleporter.non_clifford_vertices: + self.phase_teleporter.fuse_phases(root_v1,root_v2) + else: self.parent_vertex[v1] = v2 # v1 now points to v2 (a non-Clifford vertex) + return + + group_1 = self.vertex_groups.get(root_v1) + group_2 = self.vertex_groups.get(root_v2) + if group_2 is not None: + if group_1 is not None: + if group_1 == group_2: self.remove_vertex_from_group(root_v2, group_2) + # The below handling of the case when group_1 != group_2 is not optimal + elif len(self.group_data[group_1]) <= len(self.group_data[group_2]): + print('group_1 != group_2') + self.remove_vertex_from_group(root_v2, group_2) + else: + print('group_1 != group_2') + self.remove_vertex_from_group(root_v1, group_1) + rem_v = v1 + while rem_v in self.parent_vertex: + parent = self.parent_vertex[rem_v] + del self.parent_vertex[rem_v] + rem_v = parent + self.parent_vertex[v1] = v2 + else: + self.parent_vertex[v1] = v2 # v1 now points to v2 (a phase variable) + + def unfuse_vertex(self, new_vertex: VT, old_vertex: VT) -> None: + """Used for phase teleportation. + Tracks the unfusing of ``old_vertex`` onto ``new_vertex``. + + :param new_vertex: + :param old_vertex: + """ + root_old_vertex = self.root_vertex(old_vertex) + if root_old_vertex in (self.phase_teleporter.non_clifford_vertices if self.phase_teleporter else self.vertex_groups): + self.parent_vertex[new_vertex] = old_vertex + + def phase_negate(self, v: VT) -> None: + """Used for phase teleportation. + Tracks when the sign of a phase has been negated (usually as a gadget). + :param v: Vertex whose sign has been negated + """ + root_v = self.root_vertex(v) + + if self.phase_teleporter: # Fusing mode + if root_v in self.phase_teleporter.non_clifford_vertices: + self.phase_teleporter.phase_negate(root_v) + return + + if root_v in self.vertex_groups: + self.phase_mult[root_v] *= -1 + + def fix_phase(self, v: VT, current_phase: FractionLike, target_phase: FractionLike) -> None: + """Used for post phase teleportation simplifications. 
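The group bookkeeping can be illustrated with plain data: a group of fused non-Clifford vertices only ever has to realise the signed sum of its phases, which is the invariant that ``set_phase_teleporter``, ``fix_phase`` and ``remove_vertex_from_group`` maintain. A minimal sketch, with hypothetical vertex ids 5 and 9 and made-up multipliers (no PyZX objects involved):

```python
from fractions import Fraction

phases     = {5: Fraction(1, 4), 9: Fraction(7, 4)}   # hypothetical group members
phase_mult = {5: 1, 9: -1}                            # teleportation sign of each vertex

# On set-up the group stores the signed sum and zeroes the individual phases.
phase_sum = sum(phases[v] * phase_mult[v] for v in phases) % 2    # 1/4 - 7/4 = -3/2 -> 1/2
phases = {v: Fraction(0) for v in phases}

# fix_phase(v=5, current=0, target=1): e.g. a pivot needs vertex 5 to be Pauli.
target, current = Fraction(1), phases[5]
phase_sum -= phase_mult[5] * (target - current)
phases[5] = target

# The last remaining vertex of the group then absorbs whatever is left over.
phases[9] += phase_sum * phase_mult[9]
total = sum(phase_mult[v] * phases[v] for v in phases) % 2
assert total == Fraction(1, 2)    # the group's signed phase sum is unchanged
```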
+ Sets the phase of ``v`` to ``target_phase``, updating the rest of the vertex_group where necessary + + :param v: + :param current_phase: + :param target_phase: + """ + root_v = self.root_vertex(v) + + if root_v not in self.vertex_groups: + assert current_phase == target_phase # In this case the current phase should be the target phase + return + + group = self.vertex_groups[root_v] + self.phase_sum[group] -= self.phase_mult[root_v] * (target_phase - current_phase) + self.set_phase(v, target_phase) + self.remove_vertex_from_group(root_v, group) + + def check_phase(self, v: VT, current_phase: FractionLike, target_phase: FractionLike) -> bool: + """Used for post phase teleportation simplifications. + Returns a boolean representing whether ``v`` can be fixed to have a phase of ``target_phase``. + + :param v: + :param current_phase: The current phase of ``v`` in the graph + :param target_phase: + :return: + """ + root_v = self.root_vertex(v) + if root_v not in self.vertex_groups: return current_phase == target_phase + return True + + def check_two_pauli_phases(self, v1: VT, v1p: FractionLike, v2: VT, v2p: FractionLike) -> Optional[List[Optional[FractionLike]]]: + """Used for post phase teleportation simplifications. + Checks whether both ``v1`` and ``v2`` can have their phases fixed to a Pauli phase (i.e. for pivoting) + + :param v1: + :param v1p: The current phase of ``v1`` in the graph + :param v2: + :param v2p: The current phase of ``v2`` in the graph + :return: List of the two Pauli phases which the vertices can be fixed to. + If either cannot be fixed then the value of that list element is None. + If either `but only one` vertex can be fixed to Pauli then returns None. + """ + PAULI = {0,1} + + root_v1 = self.root_vertex(v1) + root_v2 = self.root_vertex(v2) + group_1 = self.vertex_groups.get(root_v1) + group_2 = self.vertex_groups.get(root_v2) + + if not group_1 and not group_2: return [v1p if v1p in PAULI else None, v2p if v2p in PAULI else None] + if not group_1: return [v1p if v1p in PAULI else None, 0] + if not group_2: return [0, v2p if v2p in PAULI else None] + + if group_1 == group_2: + if len(self.group_data[group_1]) > 2: return [0,0] # Can place phase on another vertex in group + else: # Calculate the resultant phase v2 would have if v1 was fixed to 0 + new_phase_v2 = v2p + self.phase_mult[root_v2] * (self.phase_sum[group_1] + self.phase_mult[root_v1] * v1p) + if new_phase_v2 in PAULI: return [0, new_phase_v2] + else: return None # Will get identical result if v2 was fixed to 0 and the resultant phase of v1 was calculated + + return [0,0] + + def replace(self, g2) -> None: + """Replaces the metadata in the current graph object with the metadata of ``g2``""" + raise NotImplementedError("Not implemented on backend " + type(self).backend) def remove_vertices(self, vertices: Iterable[VT]) -> None: """Removes the list of vertices from the graph.""" diff --git a/pyzx/graph/graph_s.py b/pyzx/graph/graph_s.py index 3c7477cb..e04093c9 100644 --- a/pyzx/graph/graph_s.py +++ b/pyzx/graph/graph_s.py @@ -60,14 +60,44 @@ def clone(self) -> 'GraphS': cpy.scalar = self.scalar.copy() cpy._inputs = tuple(list(self._inputs)) cpy._outputs = tuple(list(self._outputs)) - cpy.track_phases = self.track_phases - cpy.phase_index = self.phase_index.copy() - cpy.phase_master = self.phase_master + cpy.phase_teleporter = self.phase_teleporter + cpy.phase_tracking = self.phase_tracking + cpy.parent_vertex = self.parent_vertex.copy() + cpy.vertex_groups = self.vertex_groups.copy() + cpy.group_data = {group: 
set(vertices) for group, vertices in self.group_data.items()} + cpy.phase_sum = self.phase_sum.copy() cpy.phase_mult = self.phase_mult.copy() - cpy.max_phase_index = self.max_phase_index + cpy.vertex_rank = self.vertex_rank.copy() + cpy.vertices_to_update = self.vertices_to_update.copy() return cpy - - def vindex(self): return self._vindex + + def replace(self, g: 'GraphS') -> None: + self.graph = g.graph.copy() + self._vindex = g._vindex + self.nedges = g.nedges + self.ty = g.ty.copy() + self._phase = g._phase.copy() + self._qindex = g._qindex.copy() + self._maxq = g._maxq + self._rindex = g._rindex.copy() + self._maxr = g._maxr + self._vdata = g._vdata.copy() + self.scalar = g.scalar.copy() + self._inputs = tuple(list(g._inputs)) + self._outputs = tuple(list(g._outputs)) + self.phase_teleporter = g.phase_teleporter + self.phase_tracking = g.phase_tracking + self.parent_vertex = g.parent_vertex.copy() + self.vertex_groups = g.vertex_groups.copy() + self.group_data = {group: set(vertices) for group, vertices in g.group_data.items()} + self.phase_sum = g.phase_sum.copy() + self.phase_mult = g.phase_mult.copy() + self.vertex_rank = g.vertex_rank.copy() + self.vertices_to_update = g.vertices_to_update.copy() + + def vindex(self): + return self._vindex + def depth(self): if self._rindex: self._maxr = max(self._rindex.values()) else: self._maxr = -1 diff --git a/pyzx/heuristics.py b/pyzx/heuristics.py new file mode 100644 index 00000000..575bc27e --- /dev/null +++ b/pyzx/heuristics.py @@ -0,0 +1,125 @@ +# PyZX - Python library for quantum circuit rewriting +# and optimization using the ZX-calculus +# Copyright (C) 2018 - Aleks Kissinger and John van de Wetering + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module contains heuristics and helper functions for some of the rewrite rules in the rules_ module.""" + +from typing import TYPE_CHECKING, List, Callable, Optional, Union, Generic, Tuple, Dict, Iterator, Any +from typing_extensions import Literal + +from .utils import EdgeType, VertexType, toggle_edge, vertex_is_zx, toggle_vertex +from .graph.base import BaseGraph, VT, ET +from .rules import MatchIdFuseType, MatchLcompUnfuseType, MatchPivotUnfuseType + +def pivot_statistics(g: BaseGraph[VT,ET], v0: VT, v1: VT, neighbours_to_unfuse: Tuple[Tuple[VT,...],...] = ((),())) -> Tuple[int,int]: + """Returns the number of edges and vertices which would be removed under a pivot. + + :param g: The graph on which the pivot would be performed. + :param v0: The first vertex of the pivot. + :param v1: The second vertex of the pivot. + :param neighbours_to_unfuse: 2-tuple containing the neighbours to unfuse onto for the respecive vertices, defaults to ((),()) + :return: 2-tuple containing the number of edges removed and number of vertices removed. 
+ """ + unfuse0, unfuse1 = set(neighbours_to_unfuse[0]), set(neighbours_to_unfuse[1]) + + v0n = set(g.neighbors(v0)) - {v1} - unfuse0 + v1n = set(g.neighbors(v1)) - {v0} - unfuse1 + + shared_n = v0n & v1n + num_shared_n = len(shared_n) + + v0n -= shared_n + v1n -= shared_n + + num_v0n = len(v0n) + bool(unfuse0) + num_v1n = len(v1n) + bool(unfuse1) + + max_new_connections = num_v0n * num_v1n + num_v0n * num_shared_n + num_v1n * num_shared_n + + num_edges_between_neighbours = sum(1 for v in v0n for n in g.neighbors(v) if n in v1n or n in shared_n) + num_edges_between_neighbours += sum(1 for v in v1n for n in g.neighbors(v) if n in shared_n) + + num_unfusions = bool(unfuse0) + bool(unfuse1) + + edges_removed = 2*num_edges_between_neighbours - max_new_connections + num_v0n + 2*num_shared_n + num_v1n + 1 - 2*num_unfusions + vertices_removed = 2 - (2*num_unfusions) + return edges_removed, vertices_removed + +def lcomp_statistics(g: BaseGraph[VT,ET], v: VT, vn: Tuple[VT,...], neighbours_to_unfuse: Tuple[VT,...]) -> Tuple[int,int]: + """Returns the number of edges and vertices which would be removed under a local complementation. + + :param g: The graph on which the local complementation would be performed. + :param v: The vertex of the local complementation. + :param vn: The neighbours of the vertex ``v``. + :param neighbours_to_unfuse: The neighbours to unfuse from the vertex. + :return: 2-tuple containing the number of edges removed and number of vertices removed. + """ + unfuse = set(neighbours_to_unfuse) + vns = set(vn) - unfuse + num_vns = len(vns) + bool(unfuse) + + max_new_connections = (num_vns * (num_vns-1)) // 2 + num_edges_between_neighbours = sum(1 for v1 in vns for v2 in vns if v1 < v2 and g.connected(v1, v2)) + num_unfusions = bool(unfuse) + + edges_removed = 2*num_edges_between_neighbours - max_new_connections + num_vns - (2*num_unfusions) + vertices_removed = 1 - (2*num_unfusions) + return edges_removed, vertices_removed + +def id_fuse_statistics(g: BaseGraph[VT,ET], v: VT, v0: VT, v1: VT) -> Tuple[int,int]: + """Returns the number of edges and vertices which would be removed under an identity fusion. + + :param g: The graph on which identity fusion would be performed. + :param v: The central identity fusion. + :param v0: The first vertex of the fusion. + :param v1: The second vertex of the fusion. + :return: 2-tuple containing the number of edges removed and number of vertices removed. 
+ """ + v0n = set(g.neighbors(v0)) - {v} + v1n = set(g.neighbors(v1)) - {v} + shared_n = v0n & v1n + + same_edge_type_removed = sum(1 for n in shared_n if g.edge_type(g.edge(n,v0)) == g.edge_type(g.edge(n,v1))) + extra_vertices_removed = sum(1 for n in shared_n if g.edge_type(g.edge(n,v0)) == g.edge_type(g.edge(n,v1)) and len(g.neighbors(n)) == 2) + + edges_removed = 2 + same_edge_type_removed + len(shared_n) + if g.connected(v0,v1): edges_removed += 1 + + vertices_removed = 2 + extra_vertices_removed + + return edges_removed, vertices_removed + +def lcomp_2Q_simp_heuristic(g: BaseGraph[VT,ET], match: MatchLcompUnfuseType, weight: float) -> Optional[float]: + """Returns the score heuristic for a local complementation match""" + edges_removed, vertices_removed = lcomp_statistics(g, match[0], match[1], match[2]) + twoQ_removed = edges_removed - vertices_removed + if twoQ_removed > 0: return weight*twoQ_removed + if twoQ_removed == 0 and vertices_removed > 0: return weight*twoQ_removed + return None + +def pivot_2Q_simp_heuristic(g: BaseGraph[VT,ET], match: MatchPivotUnfuseType, weight: float) -> Optional[float]: + """Returns the score heuristic for a pivot match""" + edges_removed, vertices_removed = pivot_statistics(g, match[0], match[1], match[2]) + twoQ_removed = edges_removed - vertices_removed + if twoQ_removed > 0: return weight*twoQ_removed + if twoQ_removed == 0 and vertices_removed > 0: return weight*twoQ_removed + return None + +def id_fuse_2Q_reduce_heuristic(g: BaseGraph[VT,ET], match: MatchIdFuseType, weight: float) -> float: + """Returns the score heuristic for a identity fusion match""" + edges_removed, vertices_removed = id_fuse_statistics(g, match[0], match[1], match[2]) + twoQ_removed = edges_removed - vertices_removed + assert(twoQ_removed >= 0) + return weight*twoQ_removed \ No newline at end of file diff --git a/pyzx/rules.py b/pyzx/rules.py index 6d6b600f..4d63df45 100644 --- a/pyzx/rules.py +++ b/pyzx/rules.py @@ -68,6 +68,7 @@ def apply_rule( m: List[MatchObject], check_isolated_vertices:bool=True ) -> None: + """Applies a given match of a rule onto a graph""" etab, rem_verts, rem_edges, check_isolated_vertices = rewrite(g, m) g.add_edge_table(etab) g.remove_edges(rem_edges) @@ -81,12 +82,11 @@ def match_bialg(g: BaseGraph[VT,ET]) -> List[MatchBialgType[VT]]: """Does the same as :func:`match_bialg_parallel` but with ``num=1``.""" return match_bialg_parallel(g, num=1) - #TODO: make it be hadamard edge aware def match_bialg_parallel( g: BaseGraph[VT,ET], - matchf:Optional[Callable[[ET],bool]]=None, - num: int=-1 + matchf: Optional[Callable[[ET],bool]] = None, + num: int = -1 ) -> List[MatchBialgType[VT]]: """Finds noninteracting matchings of the bialgebra rule. 
@@ -128,7 +128,6 @@ def match_bialg_parallel( m.append((v0,v1,v0n,v1n)) return m - def bialg(g: BaseGraph[VT,ET], matches: List[MatchBialgType[VT]]) -> RewriteOutputType[ET,VT]: """Performs a certain type of bialgebra rewrite given matchings supplied by ``match_bialg(_parallel)``.""" @@ -144,6 +143,7 @@ def bialg(g: BaseGraph[VT,ET], matches: List[MatchBialgType[VT]]) -> RewriteOutp return (etab, rem_verts, [], True) + MatchSpiderType = Tuple[VT,VT] def match_spider(g: BaseGraph[VT,ET]) -> List[MatchSpiderType[VT]]: @@ -152,57 +152,59 @@ def match_spider(g: BaseGraph[VT,ET]) -> List[MatchSpiderType[VT]]: def match_spider_parallel( g: BaseGraph[VT,ET], - matchf:Optional[Callable[[ET],bool]]=None, - num:int=-1 + matchf: Optional[Callable[[ET],bool]] = None, + num: int = -1, + allow_interacting_matches: bool = False ) -> List[MatchSpiderType[VT]]: - """Finds non-interacting matchings of the spider fusion rule. - + """Finds matches of the spider fusion rule. + :param g: An instance of a ZX-graph. :param matchf: An optional filtering function for candidate edge, should return True if the edge should be considered for matchings. Passing None will consider all edges. :param num: Maximal amount of matchings to find. If -1 (the default) tries to find as many as possible. + :param allow_interacting_matches: Whether or not to allow matches which overlap, + hence can not all be applied at once. Defaults to False. :rtype: List of 2-tuples ``(v1, v2)`` """ if matchf is not None: candidates = set([e for e in g.edges() if matchf(e)]) else: candidates = g.edge_set() + types = g.types() i = 0 - m = [] + m: List[MatchSpiderType[VT]] = [] while (num == -1 or i < num) and len(candidates) > 0: e = candidates.pop() if g.edge_type(e) != EdgeType.SIMPLE: continue + v0, v1 = g.edge_st(e) - v0t = types[v0] - v1t = types[v1] - if (v0t == v1t and vertex_is_zx(v0t)) or \ - (vertex_is_z_like(v0t) and vertex_is_z_like(v1t)): - i += 1 - for v in g.neighbors(v0): - for c in g.incident_edges(v): candidates.discard(c) - for v in g.neighbors(v1): - for c in g.incident_edges(v): candidates.discard(c) - m.append((v0,v1)) + v0t, v1t = types[v0], types[v1] + if not ((v0t == v1t and vertex_is_zx(v0t)) or \ + (vertex_is_z_like(v0t) and vertex_is_z_like(v1t))): continue + + m.append((v0,v1)) + i += 1 + + if allow_interacting_matches: continue + for n in g.neighbors(v0): + for ne in g.incident_edges(n): candidates.discard(ne) + for n in g.neighbors(v1): + for ne in g.incident_edges(n): candidates.discard(ne) + return m - def spider(g: BaseGraph[VT,ET], matches: List[MatchSpiderType[VT]]) -> RewriteOutputType[ET,VT]: '''Performs spider fusion given a list of matchings from ``match_spider(_parallel)`` ''' rem_verts = [] etab: Dict[ET,List[int]] = dict() - for m in matches: - if g.row(m[0]) == 0: - v0, v1 = m[1], m[0] - else: - v0, v1 = m[0], m[1] - - ground = g.is_ground(v0) or g.is_ground(v1) + for v0, v1 in matches: + if g.row(v0) == 0: v0, v1 = v1, v0 - if ground: + if g.is_ground(v0) or g.is_ground(v1): g.set_phase(v0, 0) g.set_ground(v0) elif g.type(v0) == VertexType.Z_BOX or g.type(v1) == VertexType.Z_BOX: @@ -215,18 +217,16 @@ def spider(g: BaseGraph[VT,ET], matches: List[MatchSpiderType[VT]]) -> RewriteOu else: g.add_to_phase(v0, g.phase(v1)) - if g.track_phases: - g.fuse_phases(v0,v1) + if g.phase_tracking: g.fuse_phases(v0,v1) - # always delete the second vertex in the match - rem_verts.append(v1) + rem_verts.append(v1) # always delete the second vertex in the match - # edges from the second vertex are transferred to the 
first - for w in g.neighbors(v1): - if v0 == w: continue - e = g.edge(v0,w) + for n in g.neighbors(v1): # edges from the second vertex are transferred to the first + if v0 == n: continue + e = g.edge(v0,n) if e not in etab: etab[e] = [0,0] - etab[e][g.edge_type(g.edge(v1,w))-1] += 1 + etab[e][g.edge_type(g.edge(v1,n))-1] += 1 + return (etab, rem_verts, [], True) def unspider(g: BaseGraph[VT,ET], m: List[Any], qubit:FloatInt=-1, row:FloatInt=-1) -> VT: @@ -366,21 +366,21 @@ def w_fusion(g: BaseGraph[VT,ET], matches: List[MatchSpiderType[VT]]) -> Rewrite return (etab, rem_verts, [], True) -MatchPivotType = Tuple[VT,VT,List[VT],List[VT]] +MatchPivotType = Tuple[VT,VT,Tuple[VT,...],Tuple[VT,...]] def match_pivot(g: BaseGraph[VT,ET]) -> List[MatchPivotType[VT]]: """Does the same as :func:`match_pivot_parallel` but with ``num=1``.""" return match_pivot_parallel(g, num=1, check_edge_types=True) - def match_pivot_parallel( g: BaseGraph[VT,ET], - matchf:Optional[Callable[[ET],bool]]=None, - num:int=-1, - check_edge_types:bool=True + matchf: Optional[Callable[[ET],bool]] = None, + num: int = -1, + check_edge_types: bool = True, + allow_interacting_matches: bool = False ) -> List[MatchPivotType[VT]]: - """Finds non-interacting matchings of the pivot rule. - + """Finds matches of the pivot rule. + :param g: An instance of a ZX-graph. :param num: Maximal amount of matchings to find. If -1 (the default) tries to find as many as possible. @@ -389,184 +389,153 @@ def match_pivot_parallel( :param matchf: An optional filtering function for candidate edge, should return True if a edge should considered as a match. Passing None will consider all edges. + :param allow_interacting_matches: Whether or not to allow matches which overlap, + hence can not all be applied at once. Defaults to False. :rtype: List of 4-tuples. See :func:`pivot` for the details. 
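The new ``allow_interacting_matches`` flag returns overlapping matches, which cannot all be applied in a single pass. Below is a hedged sketch of applying one overlapping pivot per pass; the generator and simplifier calls are assumed from the existing PyZX API and are only there to produce some graph-like diagram.

```python
import pyzx as zx
from pyzx import rules

g = zx.generate.CNOT_HAD_PHASE_circuit(qubits=4, depth=40).to_graph()
zx.simplify.spider_simp(g, quiet=True)   # fuse adjacent spiders
zx.simplify.to_gh(g)                     # make the diagram graph-like

while True:
    ms = rules.match_pivot_parallel(g, num=-1, allow_interacting_matches=True)
    if not ms:
        break
    # Overlapping matches share vertices, so apply only one and re-search.
    rules.apply_rule(g, rules.pivot, [ms[0]])
```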
""" if matchf is not None: candidates = set([e for e in g.edges() if matchf(e)]) else: candidates = g.edge_set() + types = g.types() phases = g.phases() i = 0 - m = [] + m: List[MatchPivotType[VT]] = [] while (num == -1 or i < num) and len(candidates) > 0: e = candidates.pop() if check_edge_types and g.edge_type(e) != EdgeType.HADAMARD: continue + v0, v1 = g.edge_st(e) - if not (types[v0] == VertexType.Z and types[v1] == VertexType.Z): continue - - v0a = phases[v0] - v1a = phases[v1] - if not ((v0a in (0,1)) and (v1a in (0,1))): continue - if g.is_ground(v0) or g.is_ground(v1): - continue - + if any(phases[v] not in (0,1) for v in (v0,v1)): continue + if g.is_ground(v0) or g.is_ground(v1): continue + invalid_edge = False - v0n = list(g.neighbors(v0)) v0b = [] for n in v0n: - et = g.edge_type(g.edge(v0,n)) - if types[n] == VertexType.Z and et == EdgeType.HADAMARD: pass + if types[n] == VertexType.Z and g.edge_type(g.edge(v0,n)) == EdgeType.HADAMARD: pass elif types[n] == VertexType.BOUNDARY: v0b.append(n) else: invalid_edge = True break - if invalid_edge: continue v1n = list(g.neighbors(v1)) v1b = [] for n in v1n: - et = g.edge_type(g.edge(v1,n)) - if types[n] == VertexType.Z and et == EdgeType.HADAMARD: pass + if types[n] == VertexType.Z and g.edge_type(g.edge(v1,n)) == EdgeType.HADAMARD: pass elif types[n] == VertexType.BOUNDARY: v1b.append(n) else: invalid_edge = True break - if invalid_edge: continue if len(v0b) + len(v1b) > 1: continue - + + m.append((v0,v1,tuple(v0b),tuple(v1b))) i += 1 - for v in v0n: - for c in g.incident_edges(v): candidates.discard(c) - for v in v1n: - for c in g.incident_edges(v): candidates.discard(c) - b0 = list(v0b) - b1 = list(v1b) - m.append((v0,v1,b0,b1)) + + if allow_interacting_matches: continue + for n in v0n: + for ne in g.incident_edges(n): candidates.discard(ne) + for n in v1n: + for ne in g.incident_edges(n): candidates.discard(ne) + return m def match_pivot_gadget( - g: BaseGraph[VT,ET], - matchf:Optional[Callable[[ET],bool]]=None, - num:int=-1) -> List[MatchPivotType[VT]]: + g: BaseGraph[VT,ET], + matchf: Optional[Callable[[ET],bool]] = None, + num: int = -1, + allow_interacting_matches: bool = False + ) -> List[MatchPivotType[VT]]: """Like :func:`match_pivot_parallel`, but except for pairings of Pauli vertices, it looks for a pair of an interior Pauli vertex and an interior non-Clifford vertex in order to gadgetize the non-Clifford vertex.""" if matchf is not None: candidates = set([e for e in g.edges() if matchf(e)]) else: candidates = g.edge_set() + types = g.types() phases = g.phases() - rs = g.rows() - - edge_list = [] + i = 0 m: List[MatchPivotType[VT]] = [] while (num == -1 or i < num) and len(candidates) > 0: e = candidates.pop() v0, v1 = g.edge_st(e) - - if not (types[v0] == VertexType.Z and types[v1] == VertexType.Z): continue - - v0a = phases[v0] - v1a = phases[v1] - - if v0a not in (0,1): - if v1a in (0,1): - v0, v1 = v1, v0 - v0a, v1a = v1a, v0a + if not all(types[v] == VertexType.Z for v in (v0,v1)): continue + + if phases[v0] not in (0,1): + if phases[v1] in (0,1): v0, v1 = v1, v0 else: continue - elif v1a in (0,1): continue - # Now v0 has a Pauli phase and v1 has a non-Pauli phase - - if g.is_ground(v0): - continue - + elif phases[v1] in (0,1): continue # Now v0 has a Pauli phase and v1 has a non-Pauli phase + + if g.is_ground(v0): continue + v0n = list(g.neighbors(v0)) v1n = list(g.neighbors(v1)) if len(v1n) == 1: continue # It is a phase gadget + if any(types[n] != VertexType.Z for vn in (v0n,v1n) for n in vn): continue + 
bad_match = False - discard_edges = [] - for i,l in enumerate((v0n, v1n)): - for n in l: + edges_to_discard = [] + for i, neighbors in enumerate((v0n, v1n)): + for n in neighbors: if types[n] != VertexType.Z: bad_match = True break ne = list(g.incident_edges(n)) - if i==0 and len(ne) == 1 and not (e == ne[0]): # v0 is a phase gadget + if i == 0 and len(ne) == 1 and not (e == ne[0]): # v0 is a phase gadget bad_match = True break - discard_edges.extend(ne) + edges_to_discard.extend(ne) if bad_match: break if bad_match: continue - - if any(types[w]!=VertexType.Z for w in v0n): continue - if any(types[w]!=VertexType.Z for w in v1n): continue - # Both v0 and v1 are interior - - v = g.add_vertex(VertexType.Z,-2,rs[v0],v1a) - g.set_phase(v1, 0) - g.set_qubit(v0,-1) - g.update_phase_index(v1,v) - edge_list.append(g.edge(v,v1)) - - m.append((v0,v1,[],[v])) + + m.append((v0,v1,tuple(),tuple())) i += 1 - for c in discard_edges: candidates.discard(c) - g.add_edges(edge_list,EdgeType.SIMPLE) + + if allow_interacting_matches: continue + for c in edges_to_discard: candidates.discard(c) + return m - def match_pivot_boundary( - g: BaseGraph[VT,ET], - matchf:Optional[Callable[[VT],bool]]=None, - num:int=-1) -> List[MatchPivotType[VT]]: + g: BaseGraph[VT,ET], + matchf: Optional[Callable[[VT],bool]] = None, + num: int=-1, + allow_interacting_matches: bool = False + ) -> List[MatchPivotType[VT]]: """Like :func:`match_pivot_parallel`, but except for pairings of Pauli vertices, it looks for a pair of an interior Pauli vertex and a boundary non-Pauli vertex in order to gadgetize the non-Pauli vertex.""" if matchf is not None: candidates = set([v for v in g.vertices() if matchf(v)]) else: candidates = g.vertex_set() - types = g.types() + phases = g.phases() - rs = g.rows() - - edge_list = [] - consumed_vertices : Set[VT] = set() + types = g.types() + i = 0 + consumed_vertices: Set[VT] = set() m: List[MatchPivotType[VT]] = [] - inputs = g.inputs() while (num == -1 or i < num) and len(candidates) > 0: v = candidates.pop() - if types[v] != VertexType.Z or phases[v] not in (0,1) or g.is_ground(v): - continue - + if types[v] != VertexType.Z or phases[v] not in (0,1) or g.is_ground(v): continue + good_vert = True w = None bound = None for n in g.neighbors(v): - if types[n] != VertexType.Z: - good_vert = False - break - if len(g.neighbors(n)) == 1: # v is a phase gadget - good_vert = False - break - if n in consumed_vertices: - good_vert = False - break - if g.is_ground(n) in consumed_vertices: + if types[n] != VertexType.Z or len(g.neighbors(n)) == 1 or n in consumed_vertices or g.is_ground(n): good_vert = False break + boundaries = [] wrong_match = False for b in g.neighbors(n): - if types[b] == VertexType.BOUNDARY: - boundaries.append(b) - elif types[b] != VertexType.Z: - wrong_match = True - if len(boundaries) != 1 or wrong_match: # n is not on the boundary, - continue # has too many boundaries or has neighbors of wrong type + if types[b] == VertexType.BOUNDARY: boundaries.append(b) + elif types[b] != VertexType.Z: wrong_match = True + if len(boundaries) != 1 or wrong_match: continue # n is not on the boundary or has too many boundaries or has neighbors of wrong type if phases[n] and hasattr(phases[n], 'denominator') and phases[n].denominator == 2: w = n bound = boundaries[0] @@ -574,23 +543,19 @@ def match_pivot_boundary( w = n bound = boundaries[0] if not good_vert or w is None: continue - if bound in inputs: mod = 0.5 - else: mod = -0.5 - v1 = g.add_vertex(VertexType.Z,-2,rs[w]+mod,phases[w]) - v2 = 
g.add_vertex(VertexType.Z,-1,rs[w]+mod,0) - g.set_phase(w, 0) - g.update_phase_index(w,v1) - edge_list.append(g.edge(w,v2)) - edge_list.append(g.edge(v1,v2)) - for n in g.neighbors(v): consumed_vertices.add(n) - for n in g.neighbors(w): consumed_vertices.add(n) assert bound is not None - m.append((v,w,[],[bound])) + + m.append((v,w,tuple(),tuple([bound]))) i += 1 - for n in g.neighbors(v): candidates.discard(n) - for n in g.neighbors(w): candidates.discard(n) - - g.add_edges(edge_list, EdgeType.HADAMARD) + + if allow_interacting_matches: continue + for n in g.neighbors(v): + consumed_vertices.add(n) + candidates.discard(n) + for n in g.neighbors(w): + consumed_vertices.add(n) + candidates.discard(n) + return m def pivot(g: BaseGraph[VT,ET], matches: List[MatchPivotType[VT]]) -> RewriteOutputType[ET,VT]: @@ -605,21 +570,19 @@ def pivot(g: BaseGraph[VT,ET], matches: List[MatchPivotType[VT]]) -> RewriteOutp rem_verts: List[VT] = [] rem_edges: List[ET] = [] etab: Dict[ET,List[int]] = dict() - + + phases = g.phases() for m in matches: - # compute: - # n[0] <- non-boundary neighbors of m[0] only - # n[1] <- non-boundary neighbors of m[1] only - # n[2] <- non-boundary neighbors of m[0] and m[1] - g.update_phase_index(m[0],m[1]) n = [set(g.neighbors(m[0])), set(g.neighbors(m[1]))] for i in range(2): n[i].remove(m[1-i]) # type: ignore # Really complex typing situation if len(m[i+2]) == 1: n[i].remove(m[i+2][0]) # type: ignore - n.append(n[0] & n[1]) - n[0] = n[0] - n[2] - n[1] = n[1] - n[2] + + n.append(n[0] & n[1]) # n[2] <- non-boundary neighbors of m[0] and m[1] + n[0] = n[0] - n[2] # n[0] <- non-boundary neighbors of m[0] only + n[1] = n[1] - n[2] # n[1] <- non-boundary neighbors of m[1] only + es = ([g.edge(s,t) for s in n[0] for t in n[1]] + [g.edge(s,t) for s in n[1] for t in n[2]] + [g.edge(s,t) for s in n[0] for t in n[2]]) @@ -627,62 +590,87 @@ def pivot(g: BaseGraph[VT,ET], matches: List[MatchPivotType[VT]]) -> RewriteOutp g.scalar.add_power(k0*k2 + k1*k2 + k0*k1) for v in n[2]: - if not g.is_ground(v): - g.add_to_phase(v, 1) - - if g.phase(m[0]) and g.phase(m[1]): g.scalar.add_phase(Fraction(1)) - if not m[2] and not m[3]: - g.scalar.add_power(-(k0+k1+2*k2-1)) - elif not m[2]: - g.scalar.add_power(-(k1+k2)) + if not g.is_ground(v): g.add_to_phase(v, 1) + + if phases[m[0]] and phases[m[1]]: g.scalar.add_phase(Fraction(1)) + if not m[2] and not m[3]: g.scalar.add_power(-(k0+k1+2*k2-1)) + elif not m[2]: g.scalar.add_power(-(k1+k2)) else: g.scalar.add_power(-(k0+k2)) - for i in range(2): - # if m[i] has a phase, it will get copied on to the neighbors of m[1-i]: - a = g.phase(m[i]) # type: ignore + for i in range(2): # if m[i] has a phase, it will get copied on to the neighbors of m[1-i]: + a = phases[m[i]] # type: ignore if a: for v in n[1-i]: - if not g.is_ground(v): - g.add_to_phase(v, a) + if not g.is_ground(v): g.add_to_phase(v, a) for v in n[2]: - if not g.is_ground(v): - g.add_to_phase(v, a) + if not g.is_ground(v): g.add_to_phase(v, a) - if not m[i+2]: - # if there is no boundary, the other vertex is destroyed - rem_verts.append(m[1-i]) # type: ignore + if not m[i+2]: rem_verts.append(m[1-i]) # type: ignore # if there is no boundary, the other vertex is destroyed else: - # if there is a boundary, toggle whether it is an h-edge or a normal edge - # and point it at the other vertex - e = g.edge(m[i], m[i+2][0]) # type: ignore - new_e = g.edge(m[1-i], m[i+2][0]) # type: ignore + e = g.edge(m[i], m[i+2][0]) # type: ignore # if there is a boundary, toggle whether it is an h-edge or a 
normal edge + new_e = g.edge(m[1-i], m[i+2][0]) # type: ignore # and point it at the other vertex ne,nhe = etab.get(new_e, [0,0]) if g.edge_type(e) == EdgeType.SIMPLE: nhe += 1 elif g.edge_type(e) == EdgeType.HADAMARD: ne += 1 etab[new_e] = [ne,nhe] rem_edges.append(e) - - + for e in es: nhe = etab.get(e, (0,0))[1] etab[e] = [0,nhe+1] - + return (etab, rem_verts, rem_edges, True) -MatchLcompType = Tuple[VT,List[VT]] +def pivot_gadget(g: BaseGraph[VT,ET], matches: List[MatchPivotType[VT]]) -> RewriteOutputType[ET,VT]: + """Performs the gadgetizations required before applying pivots. + ``m[0]`` : interior pauli vertex + ``m[1]`` : interior non-pauli vertex to gadgetize + ``m[2]`` : list of zero or one boundaries adjacent to ``m[0]``. + ``m[3]`` : list of zero or one boundaries adjacent to ``m[1]``. + """ + vertices_to_gadgetize = [m[1] for m in matches] + gadgetize(g, vertices_to_gadgetize) + return pivot(g, matches) + +def gadgetize(g: BaseGraph[VT,ET], vertices: List[VT]) -> None: + """Helper function which pulls out a list of vertices into gadgets""" + edge_list = [] + + inputs = g.inputs() + phases = g.phases() + + for v in vertices: + if any(n in inputs for n in g.neighbors(v)): mod = 0.5 + else: mod = -0.5 + + vp = g.add_vertex(VertexType.Z,-2,g.row(v)+mod,phases[v]) + v0 = g.add_vertex(VertexType.Z,-1,g.row(v)+mod,0) + g.set_phase(v, 0) + + edge_list.append(g.edge(v,v0)) + edge_list.append(g.edge(v0,vp)) + + if g.phase_tracking: g.unfuse_vertex(vp,v) + + g.add_edges(edge_list, EdgeType.HADAMARD) + return + + +MatchLcompType = Tuple[VT,Tuple[VT,...]] def match_lcomp(g: BaseGraph[VT,ET]) -> List[MatchLcompType[VT]]: """Same as :func:`match_lcomp_parallel`, but with ``num=1``""" return match_lcomp_parallel(g, num=1, check_edge_types=True) def match_lcomp_parallel( - g: BaseGraph[VT,ET], - vertexf:Optional[Callable[[VT],bool]]=None, - num:int=-1, - check_edge_types:bool=True + g: BaseGraph[VT,ET], + vertexf: Optional[Callable[[VT],bool]] = None, + num: int = -1, + check_edge_types: bool = True, + allow_interacting_matches: bool = False ) -> List[MatchLcompType[VT]]: - """Finds noninteracting matchings of the local complementation rule. - + """Finds matches of the local complementation rule. + :param g: An instance of a ZX-graph. :param num: Maximal amount of matchings to find. If -1 (the default) tries to find as many as possible. @@ -691,36 +679,37 @@ def match_lcomp_parallel( :param vertexf: An optional filtering function for candidate vertices, should return True if a vertex should be considered as a match. Passing None will consider all vertices. + :param allow_interacting_matches: Whether or not to allow matches which overlap, + hence can not all be applied at once. Defaults to False. :rtype: List of 2-tuples ``(vertex, neighbors)``. 
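A minimal sketch of what the ``gadgetize`` helper above produces for a single vertex: its phase is pulled out onto a fresh degree-1 leaf behind a phaseless connector spider, both attached by Hadamard edges. The two-vertex starting graph and its coordinates are made up; the sketch assumes the standard ``pyzx.Graph`` API.

```python
import pyzx as zx
from fractions import Fraction
from pyzx.utils import VertexType, EdgeType
from pyzx import rules

g = zx.Graph()
v = g.add_vertex(VertexType.Z, 0, 1, Fraction(1, 4))
w = g.add_vertex(VertexType.Z, 1, 1)
g.add_edge(g.edge(v, w), EdgeType.HADAMARD)

rules.gadgetize(g, [v])        # pull v's pi/4 phase out into a gadget

assert g.phase(v) == 0
leaves = [u for u in g.vertices() if g.vertex_degree(u) == 1 and u != w]
assert len(leaves) == 1 and g.phase(leaves[0]) == Fraction(1, 4)
```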
""" if vertexf is not None: candidates = set([v for v in g.vertices() if vertexf(v)]) else: candidates = g.vertex_set() - types = g.types() + phases = g.phases() - + types = g.types() + i = 0 - m = [] + m: List[MatchLcompType[VT]] = [] while (num == -1 or i < num) and len(candidates) > 0: v = candidates.pop() - vt = types[v] - va = g.phase(v) - - if vt != VertexType.Z: continue - if not (va == Fraction(1,2) or va == Fraction(3,2)): continue - - if g.is_ground(v): - continue + + if types[v] != VertexType.Z: continue + if phases[v] not in (Fraction(1,2), Fraction(3,2)): continue + if g.is_ground(v): continue if check_edge_types and not ( all(g.edge_type(e) == EdgeType.HADAMARD for e in g.incident_edges(v)) ): continue vn = list(g.neighbors(v)) - - if not all(types[n] == VertexType.Z for n in vn): continue - + if any(types[n] != VertexType.Z for n in vn): continue + + m.append((v,tuple(vn))) + i += 1 + + if allow_interacting_matches: continue for n in vn: candidates.discard(n) - m.append((v,vn)) return m def lcomp(g: BaseGraph[VT,ET], matches: List[MatchLcompType[VT]]) -> RewriteOutputType[ET,VT]: @@ -730,24 +719,31 @@ def lcomp(g: BaseGraph[VT,ET], matches: List[MatchLcompType[VT]]) -> RewriteOutp for more details on the rewrite""" etab: Dict[ET,List[int]] = dict() rem = [] - for m in matches: - a = g.phase(m[0]) - rem.append(m[0]) - assert isinstance(a,Fraction) # For mypy - if a.numerator == 1: g.scalar.add_phase(Fraction(1,4)) + + phases = g.phases() + + for v, vn in matches: + p = phases[v] + rem.append(v) + assert isinstance(p,Fraction) + + if p.numerator == 1: g.scalar.add_phase(Fraction(1,4)) else: g.scalar.add_phase(Fraction(7,4)) - n = len(m[1]) + + n = len(vn) g.scalar.add_power((n-2)*(n-1)//2) + for i in range(n): - if not g.is_ground(m[1][i]): - g.add_to_phase(m[1][i], -a) + if not g.is_ground(vn[i]): + g.add_to_phase(vn[i], -p) for j in range(i+1, n): - e = g.edge(m[1][i],m[1][j]) + e = g.edge(vn[i],vn[j]) he = etab.get(e, [0,0])[1] etab[e] = [0, he+1] return (etab, rem, [], True) + MatchIdType = Tuple[VT,VT,VT,EdgeType.Type] def match_ids(g: BaseGraph[VT,ET]) -> List[MatchIdType[VT]]: @@ -755,45 +751,52 @@ def match_ids(g: BaseGraph[VT,ET]) -> List[MatchIdType[VT]]: return match_ids_parallel(g, num=1) def match_ids_parallel( - g: BaseGraph[VT,ET], - vertexf:Optional[Callable[[VT],bool]]=None, - num:int=-1 + g: BaseGraph[VT,ET], + vertexf: Optional[Callable[[VT],bool]] = None, + num: int = -1, + allow_interacting_matches: bool = False ) -> List[MatchIdType[VT]]: - """Finds non-interacting identity vertices. - + """Finds matches of identity vertices. + :param g: An instance of a ZX-graph. :param num: Maximal amount of matchings to find. If -1 (the default) tries to find as many as possible. :param vertexf: An optional filtering function for candidate vertices, should return True if a vertex should be considered as a match. Passing None will consider all vertices. + :param allow_interacting_matches: Whether or not to allow matches which overlap, + hence can not all be applied at once :rtype: List of 4-tuples ``(identity_vertex, neighbor1, neighbor2, edge_type)``. 
""" if vertexf is not None: candidates = set([v for v in g.vertices() if vertexf(v)]) else: candidates = g.vertex_set() + types = g.types() phases = g.phases() - + i = 0 - m:List[MatchIdType[VT]] = [] - + m: List[MatchIdType[VT]] = [] while (num == -1 or i < num) and len(candidates) > 0: v = candidates.pop() - if phases[v] != 0 or not vertex_is_zx(types[v]) or g.is_ground(v): - continue - neigh = g.neighbors(v) - if len(neigh) != 2: continue - v0, v1 = neigh + if phases[v] != 0 or not vertex_is_zx(types[v]) or g.is_ground(v): continue + + vn = g.neighbors(v) + if len(vn) != 2: continue + v0, v1 = vn + if (g.is_ground(v0) and types[v1] == VertexType.BOUNDARY or - g.is_ground(v1) and types[v0] == VertexType.BOUNDARY): - # Do not put ground spiders on the boundary + g.is_ground(v1) and types[v0] == VertexType.BOUNDARY): # Do not put ground spiders on the boundary continue - candidates.discard(v0) - candidates.discard(v1) + if g.edge_type(g.edge(v,v0)) != g.edge_type(g.edge(v,v1)): #exactly one of them is a hadamard edge m.append((v,v0,v1,EdgeType.HADAMARD)) else: m.append((v,v0,v1,EdgeType.SIMPLE)) i += 1 + + if allow_interacting_matches: continue + candidates.discard(v0) + candidates.discard(v1) + return m def remove_ids(g: BaseGraph[VT,ET], matches: List[MatchIdType[VT]]) -> RewriteOutputType[ET,VT]: @@ -807,15 +810,17 @@ def remove_ids(g: BaseGraph[VT,ET], matches: List[MatchIdType[VT]]) -> RewriteOu if not e in etab: etab[e] = [0,0] if et == EdgeType.SIMPLE: etab[e][0] += 1 else: etab[e][1] += 1 + return (etab, rem, [], False) -MatchGadgetType = Tuple[VT,VT,FractionLike,List[VT],List[VT]] + +MatchGadgetType = Tuple[VT, int, List[VT], Dict[VT,VT]] def match_phase_gadgets(g: BaseGraph[VT,ET],vertexf:Optional[Callable[[VT],bool]]=None) -> List[MatchGadgetType[VT]]: """Determines which phase gadgets act on the same vertices, so that they can be fused together. :param g: An instance of a ZX-graph. - :rtype: List of 5-tuples ``(axel,leaf, total combined phase, other axels with same targets, other leafs)``. + :rtype: List of 4-tuples ``(leaf, parity_length, other axels with same targets, leaf dictionary)``. 
""" if vertexf is not None: candidates = set([v for v in g.vertices() if vertexf(v)]) else: candidates = g.vertex_set() @@ -844,37 +849,42 @@ def match_phase_gadgets(g: BaseGraph[VT,ET],vertexf:Optional[Callable[[VT],bool] for par, gad in parities.items(): if len(gad) == 1: n = gad[0] - v = gadgets[n] - if phases[n] != 0: # If the phase of the axel vertex is pi, we change the phase of the gadget - g.scalar.add_phase(phases[v]) - g.phase_negate(v) - m.append((v,n,-phases[v],[],[])) + if phases[n] != 0: + m.append((n, len(par), [], gadgets)) else: - totphase = sum((1 if phases[n]==0 else -1)*phases[gadgets[n]] for n in gad)%2 - for n in gad: - if phases[n] != 0: - g.scalar.add_phase(phases[gadgets[n]]) - g.phase_negate(gadgets[n]) - g.scalar.add_power(-((len(par)-1)*(len(gad)-1))) n = gad.pop() - v = gadgets[n] - m.append((v,n,totphase, gad, [gadgets[n] for n in gad])) + m.append((n, len(par), gad, gadgets)) return m def merge_phase_gadgets(g: BaseGraph[VT,ET], matches: List[MatchGadgetType[VT]]) -> RewriteOutputType[ET,VT]: """Given the output of :func:``match_phase_gadgets``, removes phase gadgets that act on the same set of targets.""" rem = [] - for v, n, phase, othergadgets, othertargets in matches: + phases = g.phases() + for n, par_num, gad, gadgets in matches: + v = gadgets[n] + if len(gad) == 0: + if phases[n] != 0: + g.scalar.add_phase(phases[v]) + if g.phase_tracking: g.phase_negate(v) + phase = -phases[v] + else: + phase = sum((1 if phases[w]==0 else -1)*phases[gadgets[w]] for w in gad+[n])%2 + for w in gad+[n]: + if phases[w] != 0: + g.scalar.add_phase(phases[gadgets[w]]) + if g.phase_tracking: g.phase_negate(gadgets[w]) + g.scalar.add_power(-((par_num-1)*len(gad))) g.set_phase(v, phase) g.set_phase(n, 0) - rem.extend(othergadgets) + othertargets = [gadgets[w] for w in gad] + rem.extend(gad) rem.extend(othertargets) for w in othertargets: - g.fuse_phases(v,w) - if g.merge_vdata is not None: - g.merge_vdata(v, w) + if g.phase_tracking: g.fuse_phases(v,w) + if g.merge_vdata is not None: g.merge_vdata(v, w) return ({}, rem, [], False) + MatchSupplementarityType = Tuple[VT,VT,Literal[1,2],FrozenSet[VT]] def match_supplementarity(g: BaseGraph[VT,ET], vertexf:Optional[Callable[[VT],bool]]=None) -> List[MatchSupplementarityType[VT]]: @@ -950,10 +960,11 @@ def apply_supplementarity( else: raise Exception("Shouldn't happen") return ({}, rem, [], True) + MatchCopyType = Tuple[VT,VT,FractionLike,FractionLike,List[VT]] def match_copy( - g: BaseGraph[VT,ET], + g: BaseGraph[VT,ET], vertexf:Optional[Callable[[VT],bool]]=None ) -> List[MatchCopyType[VT]]: """Finds spiders with a 0 or pi phase that have a single neighbor, @@ -975,7 +986,6 @@ def match_copy( m.append((v,w,phases[v],phases[w],neigh)) candidates.discard(w) candidates.difference_update(neigh) - return m def apply_copy(g: BaseGraph[VT,ET], matches: List[MatchCopyType[VT]]) -> RewriteOutputType[ET,VT]: @@ -997,8 +1007,8 @@ def apply_copy(g: BaseGraph[VT,ET], matches: List[MatchCopyType[VT]]) -> Rewrite g.add_to_phase(n, a) return ({}, rem, [], True) -MatchPhasePolyType = Tuple[List[VT], Dict[FrozenSet[VT],Union[VT,Tuple[VT,VT]]]] +MatchPhasePolyType = Tuple[List[VT], Dict[FrozenSet[VT],Union[VT,Tuple[VT,VT]]]] def match_gadgets_phasepoly(g: BaseGraph[VT,ET]) -> List[MatchPhasePolyType[VT]]: """Finds groups of phase-gadgets that act on the same set of 4 vertices in order to apply a rewrite based on @@ -1104,3 +1114,369 @@ def apply_gadget_phasepoly(g: BaseGraph[VT,ET], matches: List[MatchPhasePolyType phase = 0 
g.add_edges([g.edge(n,v)]+[g.edge(n,w) for w in group],EdgeType.HADAMARD) g.set_phase(v, phase + Fraction(7,4)) + + +MatchIdFuseType = Tuple[VT,VT,VT] + +def match_id_fuse( + g: BaseGraph[VT,ET], + matchf: Optional[Callable[[VT], bool]] = None, + num: int = -1, + allow_interacting_matches: bool = False + ) -> List[MatchIdFuseType[VT]]: + """Finds matches of the identity fusion rule (identity removal followed immediately by spider fusion) + + :param g: An instance of a ZX-graph + :param matchf: An optional filtering function for candidate edge, should + return True if a edge should considered as a match. Defaults to None + :param num: Maximal amount of matchings to find. If -1 (the default) + tries to find as many as possible., defaults to -1 + :param allow_interacting_matches: Whether or not to allow matches which overlap, + hence can not all be applied at once. Defaults to False. + :return: List of 3-tuples. See :func:`id_fuse` for the details. + """ + if matchf is not None: candidates = {v for v in g.vertices() if matchf(v)} + else: candidates = g.vertex_set() + + phases = g.phases() + types = g.types() + + i = 0 + m: List[MatchIdFuseType] = [] + while candidates and (num == -1 or i < num): + v = candidates.pop() + phase = phases[v] + + if not vertex_is_zx(types[v]) or g.is_ground(v): continue + if g.phase_tracking: + if g.check_phase(v, phase, 0) is False: continue + elif phase != 0: continue + + ns = g.neighbors(v) + if len(ns) != 2: continue + v0, v1 = ns + + if not (vertex_is_zx(types[v0]) and types[v0] == types[v1]): continue + if g.edge_type(g.edge(v,v0)) != g.edge_type(g.edge(v,v1)): continue # Do not put ground spiders on the boundary + if any(len(g.neighbors(u)) == 1 for u in (v0,v1)): continue # Phase gadget + + m.append((v,v0,v1)) + i += 1 + + if allow_interacting_matches: continue + candidates.discard(v0) + candidates.discard(v1) + for n in g.neighbors(v0): + candidates.discard(n) + for n2 in g.neighbors(n): candidates.discard(n2) + for n in g.neighbors(v1): + candidates.discard(n) + for n2 in g.neighbors(n): candidates.discard(n2) + return m + +def id_fuse(g: BaseGraph[VT,ET], matches: List[MatchIdFuseType[VT]]) -> RewriteOutputType[ET,VT]: + """Perform a identity fusion rewrite, given a list of matches as returned by + ``match_id_fuse``. A match is itself a tuple where: + + ``m[0]`` : The central identity vertex to be removed. + ``m[1]`` : The first neighbour of the central vertex. + ``m[2]`` : The second neighbour of the central vertex. 
+ """ + rem_verts = [] + etab: Dict[ET,List[int]] = dict() + + phases = g.phases() + + for id_v, v0, v1 in matches: + rem_verts.append(id_v) + + if g.is_ground(v0) or g.is_ground(v1): + g.set_phase(v0, 0) + g.set_ground(v0) + else: g.add_to_phase(v0, phases[v1]) + + if g.phase_tracking: + g.fix_phase(id_v, phases[id_v], 0) + g.fuse_phases(v0, v1) + + rem_verts.append(v1) # always delete the second vertex in the match + + for w in g.neighbors(v1): # edges from the second vertex are transferred to the first + if w in [id_v, v0]: continue + e = g.edge(v0,w) + etab.setdefault(e, [0,0])[g.edge_type(g.edge(v1,w)) - 1] += 1 + + return (etab, rem_verts, [], True) + + +def unfuse_neighbours(g: BaseGraph[VT,ET], v: VT, neighbours_to_unfuse: Tuple[VT,...], desired_phase: FractionLike) -> Tuple[VT,VT]: + """Helper function which unfuses a vertex onto a set of neighbours, leaving it with a desired phase.""" + unfused_phase = g.phase(v) - desired_phase + + vp = g.add_vertex(VertexType.Z, -2, g.row(v), unfused_phase) + v0 = g.add_vertex(VertexType.Z, -1, g.row(v)) + + g.set_phase(v, desired_phase) + g.add_edge(g.edge(v,v0), EdgeType.HADAMARD) + g.add_edge(g.edge(v0,vp), EdgeType.HADAMARD) + + for n in neighbours_to_unfuse: + g.add_edge(g.edge(vp, n), g.edge_type(g.edge(v, n))) + g.remove_edge(g.edge(v, n)) + + if g.phase_tracking: g.unfuse_vertex(vp, v) + + return v0, vp + + +MatchLcompUnfuseType = Tuple[VT,Tuple[VT,...],Tuple[VT,...]] + +def match_lcomp_unfuse( + g: BaseGraph[VT,ET], + matchf: Optional[Callable[[VT], bool]] = None, + num: int = -1, + allow_interacting_matches: bool = True, + max_unfusions: int = 0, + **kwargs: Any + ) -> List[MatchLcompUnfuseType]: + """Finds matches of the local complementation rule including unfusions onto (a set maximum) number of neighbours. + Increasing ``max_unfusions`` scales the number of matches exponentially. + Note that the different unfusion match variations cannot be applied at once. + + :param g: An instance of a ZX-graph + :param matchf: An optional filtering function for candidate edge, should + return True if a edge should considered as a match. Defaults to None + :param num: Maximal amount of matchings to find. If -1 (the default) + tries to find as many as possible., defaults to -1 + :param allow_interacting_matches: Whether or not to allow matches which overlap, + hence can not all be applied at once. Defaults to True. + :param max_unfusions: The maximum number of neighours to unfuse onto. + :return: List of 3-tuples. See :func:`lcomp_unfuse` for the details. 
+ """ + if matchf is not None: candidates = {v for v in g.vertices() if matchf(v)} + else: candidates = g.vertex_set() + + phases = g.phases() + types = g.types() + + i = 0 + m: List[MatchLcompUnfuseType] = [] + while candidates and (num == -1 or i < num): + v = candidates.pop() + + if types[v] != VertexType.Z or g.is_ground(v) or g.vertex_degree(v) == 1: continue + + vn = list(g.neighbors(v)) + + vb = [n for n in vn if types[n] == VertexType.BOUNDARY] + if any(types[n] != VertexType.Z or g.edge_type(g.edge(v, n)) != EdgeType.HADAMARD for n in vn if n not in vb): continue + + for subset_size in range(min(len(vn)-1, max_unfusions+1)): + for neighbours_to_unfuse in itertools.combinations(vn, subset_size): + if not set(vb).issubset(set(neighbours_to_unfuse)): continue + + if len(neighbours_to_unfuse) == 0: + phase = phases[v] + if g.phase_tracking: + if not (g.check_phase(v, phase, Fraction(1,2)) or g.check_phase(v, phase, Fraction(3,2))): continue + elif phase not in (Fraction(1,2), Fraction(3,2)): continue + + m.append((v,tuple(vn),neighbours_to_unfuse)) + + i += 1 + + if allow_interacting_matches: continue + candidates.difference_update(vn) + return m + +def lcomp_unfuse(g: BaseGraph[VT,ET], matches: List[MatchLcompUnfuseType[VT]]) -> RewriteOutputType[ET,VT]: + """Perform a local complemntation unfusion rewrite, given a list of matches as returned by + ``match_lcomp_unfuse``. A match is itself a tuple where: + + ``m[0]`` : The central identity vertex to be removed. + ``m[1]`` : The first neighbour of the central vertex. + ``m[2]`` : The second neighbour of the central vertex. + """ + updated_matches: List[MatchLcompType] = [] + + phases = g.phases() + + for v, vn, neighbours_to_unfuse in matches: + if not neighbours_to_unfuse: + if g.phase_tracking: + phase = phases[v] + if g.check_phase(v, phase, Fraction(1,2)): g.fix_phase(v, phase, Fraction(1,2)) + else: g.fix_phase(v, phase, Fraction(3,2)) + updated_matches.append((v,tuple(vn))) + else: + v0, vp = unfuse_neighbours(g, v, neighbours_to_unfuse, Fraction(1,2)) + updated_matches.append((v, tuple(v for v in vn if v not in neighbours_to_unfuse) + (v0,))) + + return lcomp(g, updated_matches) + + +MatchPivotUnfuseType = Tuple[VT,VT,Tuple[Tuple[VT,...],...]] + +def match_pivot_unfuse( + g: BaseGraph[VT,ET], + matchf: Optional[Callable[[ET], bool]] = None, + num: int = -1, + allow_interacting_matches: bool = False, + max_unfusions: int = 0, + **kwargs: Any + ) -> List[MatchPivotUnfuseType[VT]]: + """Finds matches of the pivot rule including unfusions onto (a set maximum) number of neighbours from each vertex. + Increasing ``max_unfusions`` scales the number of matches exponentially. + Note that the different unfusion match variations cannot be applied at once. + + :param g: An instance of a ZX-graph + :param matchf: An optional filtering function for candidate edge, should + return True if a edge should considered as a match. Defaults to None + :param num: Maximal amount of matchings to find. If -1 (the default) + tries to find as many as possible., defaults to -1 + :param allow_interacting_matches: Whether or not to allow matches which overlap, + hence can not all be applied at once. Defaults to True. + :param max_unfusions: The maximum number of neighours to unfuse onto for each vertex. + :return: List of 3-tuples. See :func:`pivot_unfuse` for the details. 
+ """ + if matchf: candidates = {e for e in g.edges() if matchf(e) and g.edge_type(e) == EdgeType.HADAMARD} + else: candidates = {e for e in g.edges() if g.edge_type(e) == EdgeType.HADAMARD} + + phases = g.phases() + types = g.types() + + i = 0 + m: List[MatchPivotUnfuseType] = [] + while (num == -1 or i < num) and len(candidates) > 0: + e = candidates.pop() + + v0, v1 = g.edge_st(e) + if types[v0] != VertexType.Z or types[v1] != VertexType.Z or g.is_ground(v0) or g.is_ground(v1): continue + + v0n = list(g.neighbors(v0)) + v1n = list(g.neighbors(v1)) + if len(v0n) == 1 or len(v1n) == 1: continue + + v0b = set() + for n in v0n: + ty = types[n] + if ty == VertexType.BOUNDARY: + v0b.add(n) + continue + if ty != VertexType.Z or g.edge_type(g.edge(v0, n)) != EdgeType.HADAMARD: continue + + v1b = set() + for n in v1n: + ty = types[n] + if ty == VertexType.BOUNDARY: + v1b.add(n) + continue + if ty != VertexType.Z or g.edge_type(g.edge(v1, n)) != EdgeType.HADAMARD: continue + + if g.phase_tracking: pivot_phases = g.check_two_pauli_phases(v0, phases[v0], v1, phases[v1]) + else: pivot_phases = [p if p in {0, 1} else None for p in (phases[v0], phases[v1])] + + max_subset_v0 = min(len(v0n) - 1, max_unfusions + 1) + max_subset_v1 = min(len(v1n) - 1, max_unfusions + 1) + for subset_size_v0 in range(max_subset_v0): + for neighbours_to_unfuse_0 in itertools.combinations(v0n, subset_size_v0): + if v1 in neighbours_to_unfuse_0: continue + if v0b and not v0b.issubset(neighbours_to_unfuse_0): continue + + for subset_size_v1 in range(max_subset_v1): + for neighbours_to_unfuse_1 in itertools.combinations(v1n, subset_size_v1): + if v0 in neighbours_to_unfuse_1: continue + if v1b and not v1b.issubset(neighbours_to_unfuse_1): continue + + if (pivot_phases is None and not (neighbours_to_unfuse_0 or neighbours_to_unfuse_1)) or \ + (pivot_phases == [None, None] and (not neighbours_to_unfuse_0 or not neighbours_to_unfuse_1)) or \ + (pivot_phases and pivot_phases[0] is None and not neighbours_to_unfuse_0) or \ + (pivot_phases and pivot_phases[1] is None and not neighbours_to_unfuse_1): + continue + + m.append((v0, v1, (tuple(neighbours_to_unfuse_0), tuple(neighbours_to_unfuse_1)))) + + i += 1 + + if allow_interacting_matches: continue + for n in v0n + v1n: + candidates -= set(g.incident_edges(n)) + return m + +def pivot_unfuse(g: BaseGraph[VT,ET], matches: List[MatchPivotUnfuseType[VT]]) -> RewriteOutputType[ET,VT]: + """Perform a pivot unfusion rewrite, given a list of matches as returned by + ``match_pivot_unfuse``. 
A match is itself a tuple where: + + ``m[0]`` : The first pivot vertex + ``m[1]`` : The second pivot vertex + ``m[2]`` : 2-tuple containing tuples of the neighbours to unfuse from each respective vertex + """ + updated_matches: List[MatchPivotType[VT]] = [] + + phases = g.phases() + + for v0, v1, (neighbours_to_unfuse_0, neighbours_to_unfuse_1) in matches: + len_n0, len_n1 = len(neighbours_to_unfuse_0), len(neighbours_to_unfuse_1) + + if g.phase_tracking: + v0p, v1p = phases[v0], phases[v1] + pivot_phases = g.check_two_pauli_phases(v0, v0p, v1, v1p) + + if len_n0 == 0: + p0 = pivot_phases[0] if pivot_phases else 0 + assert p0 is not None + g.fix_phase(v0, v0p, p0) + if len_n1 == 0 and pivot_phases: + assert pivot_phases[1] is not None + g.fix_phase(v1, v1p, pivot_phases[1]) + elif len_n1 == 0: + p1 = pivot_phases[1] if pivot_phases else 0 + assert p1 is not None + g.fix_phase(v1, v1p, p1) + + if len_n0 > 0: unfuse_neighbours(g, v0, neighbours_to_unfuse_0, 0) + if len_n1 > 0: unfuse_neighbours(g, v1, neighbours_to_unfuse_1, 0) + + updated_matches.append((v0, v1, tuple(), tuple())) + + return pivot(g, updated_matches) + + +MatchUnfuseType = Union[Tuple[MatchLcompUnfuseType, None, None],Tuple[None, MatchPivotUnfuseType, None], Tuple[None,None,MatchIdFuseType]] + +def match_2Q_simp( + g: BaseGraph[VT,ET], + matchf: Optional[Callable[[Union[VT,ET]],bool]] = None, + rewrites: List[str] = ['id_fuse','lcomp','pivot'], + max_lc_unfusions: int = 0, + max_p_unfusions: int = 0, + **kwargs: Any + ) -> List[MatchUnfuseType]: + """Finds matches of :func:`lcomp_unfuse`, :func:`pivot_unfuse` and :func:`id_fuse`. + Increasing ``max_lc_unfusions`` or ``max_p_unfusions`` scales the number of matches exponentially. + + :param g: An instance of a ZX-graph + :param matchf: An optional filtering function for candidate edge, should + return True if a edge should considered as a match. Defaults to None + :param rewrites: A list containing which rewrites to apply. Defaults to ['id_fuse','lcomp','pivot'] + :param max_lc_unfusions: The maximum number of neighours to unfuse onto for each local complementation. + :param max_p_unfusions: The maximum number of neighours to unfuse onto for each pivot vertex. + :return: List of 3-tuples. See :func:`rewrite_2Q_simp` for the details. + """ + m: List[MatchUnfuseType] = [] + if 'lcomp' in rewrites: m.extend([(match,None,None) for match in match_lcomp_unfuse(g, matchf, allow_interacting_matches=True, max_unfusions=max_lc_unfusions)]) + if 'pivot' in rewrites: m.extend([(None,match,None) for match in match_pivot_unfuse(g, matchf, allow_interacting_matches=True, max_unfusions=max_p_unfusions)]) + if 'id_fuse' in rewrites: m.extend([(None,None,match) for match in match_id_fuse(g, matchf, allow_interacting_matches=True)]) + return m + +def rewrite_2Q_simp(g: BaseGraph[VT,ET], match: List[MatchUnfuseType]) -> RewriteOutputType[ET,VT]: + """Perform a 2Q_simp rewrite, given a list of matches as returned by + ``match_2Q_simp``. A match is itself a tuple where: + + ``m[0]`` : ``lcomp_unfuse`` match, otherwise None. + ``m[1]`` : ``pivot_unfuse`` match, otherwise None. + ``m[2]`` : ``id_fuse`` match, otherwise None. 
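# Hedged sketch of the combined matcher: rewrite_2Q_simp expects a single-element
# list (it reads match[0]), mirroring how selective_simp applies it through
# apply_rule later in this diff; the graph is illustrative.
import pyzx as zx
from pyzx.rules import match_2Q_simp, rewrite_2Q_simp, apply_rule
from pyzx.simplify import to_graph_like

g = zx.generate.cliffordT(4, 30)
to_graph_like(g, assert_bound_connections=False)
ms = match_2Q_simp(g, max_lc_unfusions=1, max_p_unfusions=1)
if ms:
    apply_rule(g, rewrite_2Q_simp, [ms[0]])   # one lcomp_unfuse/pivot_unfuse/id_fuse step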
+ """ + if match[0][0]: return lcomp_unfuse(g,[match[0][0]]) + if match[0][1]: return pivot_unfuse(g,[match[0][1]]) + if match[0][2]: return id_fuse(g,[match[0][2]]) \ No newline at end of file diff --git a/pyzx/scripts/circ2circ.py b/pyzx/scripts/circ2circ.py index 18cd3029..640fd449 100644 --- a/pyzx/scripts/circ2circ.py +++ b/pyzx/scripts/circ2circ.py @@ -73,7 +73,7 @@ def main(args): g = c.to_graph() if options.verbose: print("Running simplification algorithm...") if options.simp == 'tele': - g = simplify.teleport_reduce(g,quiet=(not options.verbose)) + simplify.teleport_reduce(g) c2 = Circuit.from_graph(g) c2 = c2.split_phase_gates() else: diff --git a/pyzx/scripts/circuit_router.py b/pyzx/scripts/circuit_router.py index 10c0f15e..d104e219 100644 --- a/pyzx/scripts/circuit_router.py +++ b/pyzx/scripts/circuit_router.py @@ -413,7 +413,7 @@ def route_circuit( compiled_circuit.compile() else: g = c.to_graph() - g = teleport_reduce(g) + teleport_reduce(g) interior_clifford_simp(g) g = g.copy() # reduces the number of gates when extracting. if type(architecture) == type(""): @@ -486,7 +486,7 @@ def fitness(permutation): for gate in reversed(c.gates): rev_c.add_gate(gate) rev_g = rev_c.to_graph() - rev_g = teleport_reduce(rev_g) + teleport_reduce(rev_g) interior_clifford_simp(rev_g) rev_g = rev_g.copy() # reduces the number of gates when extracting. diff --git a/pyzx/simplify.py b/pyzx/simplify.py index 91c1b227..df71d39e 100644 --- a/pyzx/simplify.py +++ b/pyzx/simplify.py @@ -24,13 +24,18 @@ __all__ = ['bialg_simp','spider_simp', 'id_simp', 'phase_free_simp', 'pivot_simp', 'pivot_gadget_simp', 'pivot_boundary_simp', 'gadget_simp', 'lcomp_simp', 'clifford_simp', 'tcount', 'to_gh', 'to_rg', - 'full_reduce', 'teleport_reduce', 'reduce_scalar', 'supplementarity_simp', - 'to_clifford_normal_form_graph', 'to_graph_like', 'is_graph_like'] + 'full_reduce', 'teleport_reduce', 'reduce_scalar', 'supplementarity_simp', + 'id_fuse_simp', 'to_graph_like', 'is_graph_like', 'basic_simp', 'flow_2Q_simp', + 'to_clifford_normal_form_graph'] -from typing import List, Callable, Optional, Union, Generic, Tuple, Dict, Iterator, cast +from typing import List, Callable, Optional, Union, Generic, Tuple, Dict, Iterator, Any, cast, DefaultDict +from collections import defaultdict +from .symbolic import Poly from .utils import EdgeType, VertexType, toggle_edge, vertex_is_zx, toggle_vertex from .rules import * +from .heuristics import * +from .flow import * from .graph.base import BaseGraph, VT, ET from .circuit import Circuit @@ -56,94 +61,120 @@ def simp( g: BaseGraph[VT,ET], name: str, match: Callable[..., List[MatchObject]], - rewrite: Callable[[BaseGraph[VT,ET],List[MatchObject]],RewriteOutputType[ET,VT]], - matchf:Optional[Union[Callable[[ET],bool], Callable[[VT],bool]]]=None, - quiet:bool=False, - stats:Optional[Stats]=None) -> int: - """Helper method for constructing simplification strategies based on the rules present in rules_. - It uses the ``match`` function to find matches, and then rewrites ``g`` using ``rewrite``. - If ``matchf`` is supplied, only the vertices or edges for which matchf() returns True are considered for matches. - - Example: - ``simp(g, 'spider_simp', rules.match_spider_parallel, rules.spider)`` - - Args: - g: The graph that needs to be simplified. - str name: The name to display if ``quiet`` is set to False. - match: One of the ``match_*`` functions of rules_. - rewrite: One of the rewrite functions of rules_. 
- matchf: An optional filtering function on candidate vertices or edges, which - is passed as the second argument to the match function. - quiet: Suppress output on numbers of matches found during simplification. - - Returns: - Number of iterations of ``rewrite`` that had to be applied before no more matches were found.""" - - i = 0 - new_matches = True - while new_matches: - new_matches = False - if matchf is not None: - m = match(g, matchf) - else: - m = match(g) - if len(m) > 0: - i += 1 - if i == 1 and not quiet: print("{}: ".format(name),end='') - if not quiet: print(len(m), end='') - #print(len(m), end='', flush=True) #flush only supported on Python >3.3 - etab, rem_verts, rem_edges, check_isolated_vertices = rewrite(g, m) - g.add_edge_table(etab) - g.remove_edges(rem_edges) - g.remove_vertices(rem_verts) - if check_isolated_vertices: g.remove_isolated_vertices() - if not quiet: print('. ', end='') - #print('. ', end='', flush=True) - new_matches = True - if stats is not None: stats.count_rewrites(name, len(m)) - if not quiet and i>0: print(' {!s} iterations'.format(i)) - return i - -def pivot_simp(g: BaseGraph[VT,ET], matchf:Optional[Callable[[ET],bool]]=None, quiet:bool=False, stats:Optional[Stats]=None) -> int: + rewrite: Callable[[BaseGraph[VT, ET], List[MatchObject]], RewriteOutputType[ET, VT]], + matchf: Union[Optional[Callable[[VT], bool]], Optional[Callable[[ET], bool]]] = None, + num: Optional[int] = None, + quiet: bool = False, + stats: Optional[Stats] = None + ) -> int: + """Helper method for constructing simplification strategies based on the rules present in rules_. + It uses the ``match`` function to find matches, then rewrites ``g`` using ``rewrite``. + + :param g: The graph to be simplified + :param name: The name to display if ``quiet`` is set to False. + :param match: One of the match functions of rules_. + :param rewrite: One of the rewrite functions of rules_. + :param matchf: An optional filtering function on candidate vertices or edges, passed as a second argument to the match function. + If it is supplied only vertices or edges whih return True are considered for matches. Defaults to None. + :param num: The maximum number of rewrites to perform, defaults to None. + :param quiet: Supress output on numbers of matches found during simplification, defaults to False. + :return: Number of iterations of ``rewrite`` that had to be applied before no more matches were found. + """ + num_iterations = 0 + total_rewrites = 0 + while True: + if matchf: + if num: + matches = match(g, matchf, num = -1 if num == -1 else num-total_rewrites) + else: + matches = match(g, matchf) + elif num: + matches = match(g, num = -1 if num == -1 else num-total_rewrites) + else: + matches = match(g) + + num_rewrites = len(matches) + if num_rewrites == 0: break + + num_iterations += 1 + total_rewrites += num_rewrites + + if num_iterations == 1 and not quiet: print(f'{name}: ',end='') + if not quiet: print(num_rewrites, end='') + + apply_rule(g,rewrite,matches) + + if not quiet: print('. 
', end='') + if stats is not None: stats.count_rewrites(name, num_rewrites) + + if total_rewrites == num: break + + if not quiet and num_iterations > 0: print(f' {num_iterations} iterations') + return num_iterations + +def pivot_simp(g: BaseGraph[VT,ET], matchf: Optional[Callable[[ET], bool]] = None, quiet: bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the pivot rule until there are no more matches""" return simp(g, 'pivot_simp', match_pivot_parallel, pivot, matchf=matchf, quiet=quiet, stats=stats) -def pivot_gadget_simp(g: BaseGraph[VT,ET], matchf:Optional[Callable[[ET],bool]]=None, quiet:bool=False, stats:Optional[Stats]=None) -> int: - return simp(g, 'pivot_gadget_simp', match_pivot_gadget, pivot, matchf=matchf, quiet=quiet, stats=stats) +def pivot_gadget_simp(g: BaseGraph[VT,ET], matchf: Optional[Callable[[ET],bool]] = None, quiet: bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the pivot_gadget rule until there are no more matches""" + return simp(g, 'pivot_gadget_simp', match_pivot_gadget, pivot_gadget, matchf=matchf, quiet=quiet, stats=stats) -def pivot_boundary_simp(g: BaseGraph[VT,ET], matchf:Optional[Callable[[ET],bool]]=None, quiet:bool=False, stats:Optional[Stats]=None) -> int: - return simp(g, 'pivot_boundary_simp', match_pivot_boundary, pivot, matchf=matchf, quiet=quiet, stats=stats) +def pivot_boundary_simp(g: BaseGraph[VT,ET], matchf: Optional[Callable[[ET], bool]]=None, quiet: bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the pivot boundary rule until there are no more matches""" + return simp(g, 'pivot_boundary_simp', match_pivot_boundary, pivot_gadget, matchf=matchf, quiet=quiet, stats=stats) -def lcomp_simp(g: BaseGraph[VT,ET], matchf:Optional[Callable[[VT],bool]]=None, quiet:bool=False, stats:Optional[Stats]=None) -> int: +def lcomp_simp(g: BaseGraph[VT,ET], matchf:Optional[Callable[[VT],bool]] = None, quiet: bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the local complementation rule until there are no more matches""" return simp(g, 'lcomp_simp', match_lcomp_parallel, lcomp, matchf=matchf, quiet=quiet, stats=stats) -def bialg_simp(g: BaseGraph[VT,ET], quiet:bool=False, stats: Optional[Stats]=None) -> int: +def bialg_simp(g: BaseGraph[VT,ET], quiet:bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the bialgebra rule until there are no more matches""" return simp(g, 'bialg_simp', match_bialg_parallel, bialg, quiet=quiet, stats=stats) -def spider_simp(g: BaseGraph[VT,ET], matchf:Optional[Callable[[VT],bool]]=None, quiet:bool=False, stats:Optional[Stats]=None) -> int: +def spider_simp(g: BaseGraph[VT,ET], matchf: Optional[Callable[[VT],bool]] = None, quiet:bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the spider fusion boundary rule until there are no more matches""" return simp(g, 'spider_simp', match_spider_parallel, spider, matchf=matchf, quiet=quiet, stats=stats) -def id_simp(g: BaseGraph[VT,ET], matchf:Optional[Callable[[VT],bool]]=None, quiet:bool=False, stats:Optional[Stats]=None) -> int: +def id_simp(g: BaseGraph[VT,ET], matchf: Optional[Callable[[VT],bool]] = None, quiet: bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the identity removal rule until there are no more matches""" return simp(g, 'id_simp', match_ids_parallel, remove_ids, matchf=matchf, quiet=quiet, stats=stats) -def gadget_simp(g: BaseGraph[VT,ET], quiet:bool=False, 
stats:Optional[Stats]=None) -> int: +def id_fuse_simp(g: BaseGraph[VT,ET], matchf: Optional[Callable[[VT], bool]] = None, quiet :bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the identity fusion rule (identity removal followed by spider fusion) until there are no more matches""" + return simp(g, 'id_fuse', match_id_fuse, id_fuse, matchf=matchf, quiet=quiet, stats=stats) + +def gadget_simp(g: BaseGraph[VT,ET], quiet: bool = False, stats:Optional[Stats] = None) -> int: + """Iteratively applies the gadget fusion rule until there are no more matches""" return simp(g, 'gadget_simp', match_phase_gadgets, merge_phase_gadgets, quiet=quiet, stats=stats) -def supplementarity_simp(g: BaseGraph[VT,ET], quiet:bool=False, stats:Optional[Stats]=None) -> int: +def supplementarity_simp(g: BaseGraph[VT,ET], quiet: bool = False, stats: Optional[Stats] = None) -> int: + """Iteratively applies the supplementarity rule until there are no more matches""" return simp(g, 'supplementarity_simp', match_supplementarity, apply_supplementarity, quiet=quiet, stats=stats) -def copy_simp(g: BaseGraph[VT,ET], quiet:bool=False, stats:Optional[Stats]=None) -> int: +def copy_simp(g: BaseGraph[VT,ET], quiet: bool = False, stats: Optional[Stats] = None) -> int: """Copies 1-ary spiders with 0/pi phase through neighbors. WARNING: only use on maximally fused diagrams consisting solely of Z-spiders.""" return simp(g, 'copy_simp', match_copy, apply_copy, quiet=quiet, stats=stats) -def phase_free_simp(g: BaseGraph[VT,ET], quiet:bool=False, stats:Optional[Stats]=None) -> int: +def phase_free_simp(g: BaseGraph[VT,ET], quiet: bool = False, stats: Optional[Stats] = None) -> int: '''Performs the following set of simplifications on the graph: spider -> bialg''' i1 = spider_simp(g, quiet=quiet, stats=stats) i2 = bialg_simp(g, quiet=quiet, stats=stats) return i1+i2 -def interior_clifford_simp(g: BaseGraph[VT,ET], quiet:bool=False, stats:Optional[Stats]=None) -> int: +def basic_simp(g: BaseGraph[VT,ET], quiet: bool = True, stats: Optional[Stats] = None) -> int: + """Keeps doing the simplifications ``id_simp``, ``spider_simp`` until none of them can be applied anymore.""" + j = 0 + while True: + i1 = id_simp(g, quiet=quiet, stats=stats) + i2 = spider_simp(g, quiet=quiet, stats=stats) + if i1 + i2 == 0: break + j += 1 + return j + +def interior_clifford_simp(g: BaseGraph[VT,ET], quiet: bool = False, stats: Optional[Stats] = None) -> int: """Keeps doing the simplifications ``id_simp``, ``spider_simp``, ``pivot_simp`` and ``lcomp_simp`` until none of them can be applied anymore.""" spider_simp(g, quiet=quiet, stats=stats) @@ -158,7 +189,7 @@ def interior_clifford_simp(g: BaseGraph[VT,ET], quiet:bool=False, stats:Optional i += 1 return i -def clifford_simp(g: BaseGraph[VT,ET], quiet:bool=True, stats:Optional[Stats]=None) -> int: +def clifford_simp(g: BaseGraph[VT,ET], quiet: bool = True, stats: Optional[Stats] = None) -> int: """Keeps doing rounds of :func:`interior_clifford_simp` and :func:`pivot_boundary_simp` until they can't be applied anymore.""" i = 0 @@ -169,7 +200,7 @@ def clifford_simp(g: BaseGraph[VT,ET], quiet:bool=True, stats:Optional[Stats]=No break return i -def reduce_scalar(g: BaseGraph[VT,ET], quiet:bool=True, stats:Optional[Stats]=None) -> int: +def reduce_scalar(g: BaseGraph[VT,ET], quiet: bool = True, stats: Optional[Stats] = None) -> int: """Modification of ``full_reduce`` that is tailered for scalar ZX-diagrams. 
It skips the boundary pivots, and it additionally does ``supplementarity_simp``.""" i = 0 @@ -191,9 +222,7 @@ def reduce_scalar(g: BaseGraph[VT,ET], quiet:bool=True, stats:Optional[Stats]=No i += 1 return i - - -def full_reduce(g: BaseGraph[VT,ET], quiet:bool=True, stats:Optional[Stats]=None) -> None: +def full_reduce(g: BaseGraph[VT,ET], quiet: bool = True, stats: Optional[Stats] = None) -> None: """The main simplification routine of PyZX. It uses a combination of :func:`clifford_simp` and the gadgetization strategies :func:`pivot_gadget_simp` and :func:`gadget_simp`.""" interior_clifford_simp(g, quiet=quiet, stats=stats) @@ -203,74 +232,216 @@ def full_reduce(g: BaseGraph[VT,ET], quiet:bool=True, stats:Optional[Stats]=None i = gadget_simp(g, quiet=quiet, stats=stats) interior_clifford_simp(g,quiet=quiet, stats=stats) j = pivot_gadget_simp(g,quiet=quiet, stats=stats) - if i+j == 0: + if i+j == 0: break -def teleport_reduce(g: BaseGraph[VT,ET], quiet:bool=True, stats:Optional[Stats]=None) -> BaseGraph[VT,ET]: - """This simplification procedure runs :func:`full_reduce` in a way - that does not change the graph structure of the resulting diagram. - The only thing that is different in the output graph are the location and value of the phases.""" - s = Simplifier(g) - s.full_reduce(quiet=quiet, stats=stats) - return s.mastergraph - -class Simplifier(Generic[VT, ET]): - """Class used for :func:`teleport_reduce`.""" +class PhaseTeleporter(Generic[VT, ET]): + """Class used for phase teleportation.""" def __init__(self, g: BaseGraph[VT,ET]) -> None: - g.track_phases = True - self.mastergraph = g.copy() - self.simplifygraph = g.copy() - self.simplifygraph.set_phase_master(self) - self.phantom_phases: Dict[VT, Tuple[VT,int]] = dict() - - def fuse_phases(self,i1:int, i2: int) -> None: - try: - v1 = self.mastergraph.vertex_from_phase_index(i1) - v2 = self.mastergraph.vertex_from_phase_index(i2) - except ValueError: return - #self.mastergraph.phase_index[v2] = i1 - p1 = self.mastergraph.phase(v1) - p2 = self.mastergraph.phase(v2) - m1 = self.simplifygraph.phase_mult[i1] - m2 = self.simplifygraph.phase_mult[i2] - if (p2 == 0 or p2.denominator <= 2): # Deleted vertex contains Clifford phase - if v2 in self.phantom_phases: - v3,i3 = self.phantom_phases[v2] - m2 = cast(Literal[1, -1], m2*self.simplifygraph.phase_mult[i3]) - v2,i2 = v3,i3 - p2 = self.mastergraph.phase(v2) - else: return - if (p1 == 0 or p1.denominator <= 2): # Need to save non-Clifford location - self.simplifygraph.phase_mult[i1] = 1 - if v1 in self.phantom_phases: # Already fused with non-Clifford before - v3,i3 = self.phantom_phases[v1] - self.mastergraph.phase_index[v3] = i1 - del self.mastergraph.phase_index[v1] - p1 = self.mastergraph.phase(v3) - if (p1+p2).denominator <= 2: - del self.phantom_phases[v1] - v1,i1 = v3,i3 - m1 = cast(Literal[1, -1], m1*self.simplifygraph.phase_mult[i3]) - else: - self.phantom_phases[v1] = (v2,i2) - self.simplifygraph.phase_mult[i2] = m2 - return - if p1.denominator <= 2 or p2.denominator <= 2: raise Exception("Clifford phases here??") - # Both have non-Clifford phase - if m1*m2 == 1: phase = (p1 + p2)%2 - else: phase = p1 - p2 - self.mastergraph.set_phase(v1,phase) - self.mastergraph.set_phase(v2,0) - - self.simplifygraph.phase_mult[i2] = 1 - - def full_reduce(self, quiet:bool=True, stats:Optional[Stats]=None) -> None: - full_reduce(self.simplifygraph,quiet=quiet, stats=stats) - - - -def to_gh(g: BaseGraph[VT,ET],quiet:bool=True) -> None: + self.original_graph: BaseGraph[VT,ET] = g.copy() + 
self.parent_vertex: Dict[VT,VT] = {} + self.vertex_rank: Dict[VT,int] = {} + self.phase_mult: Dict[VT,int] = {} + self.non_clifford_vertices: Set[VT] = set() + for v in self.original_graph.vertices(): + if self.original_graph.phase(v).denominator > 2 or isinstance(self.original_graph.phase(v), Poly): + self.parent_vertex[v] = v + self.vertex_rank[v] = 0 + self.phase_mult[v] = 1 + self.non_clifford_vertices.add(v) + + def parent(self, v: VT) -> VT: + if self.parent_vertex[v] != v: + self.parent_vertex[v] = self.parent(self.parent_vertex[v]) + return self.parent_vertex[v] + + def get_vertex_groups(self) -> List[List[VT]]: + vertex_groups: DefaultDict[VT,List[VT]] = defaultdict(list) + for v in self.non_clifford_vertices: + root = self.parent(v) + vertex_groups[root].append(v) + return list(vertex_groups.values()) + + def fuse_phases(self, v1: VT, v2: VT) -> None: + if not all(v in self.non_clifford_vertices for v in (v1, v2)): return + self.parent_vertex[v2] = v1 + self.vertex_rank[v2] = self.vertex_rank[v1] + self.vertex_rank[v1] += 1 + + def phase_negate(self, v: VT) -> None: + root = self.parent(v) + verts = [vert for vert in self.non_clifford_vertices if self.parent(vert) == root] + for vert in verts: + self.phase_mult[vert] *= -1 + + def init_simplify_graph(self, fusing_mode: bool = True) -> None: + self.simplify_graph = self.original_graph.clone() + self.simplify_graph.set_phase_teleporter(self, fusing_mode) + + def teleport_phases(self, store:bool = False) -> None: + self.init_simplify_graph() + full_reduce(self.simplify_graph) + self.init_simplify_graph(fusing_mode = False) + if not store: self.simplify_graph.place_tracked_phases() + +def teleport_reduce(g: BaseGraph[VT,ET], store: bool = False) -> None: + """This simplification procedure performs phase teleportation, running :func:`full_reduce` + to find simplifications which do not change the graph structure of the resulting diagram. + The only thing that is different in the output graph are the location and value of the non-Clifford phases. + ``store`` provides an option to store the different possiblities for the phases and locations without placing them on the graph `yet`, + giving more flexibility for future simplifications. + Currently this is only of use when using :func:`flow_2Q_simp`. + The phases can be placed at any time using ``g.place_tracked_phases()`` + + :param g: The graph to be simplified + :param store: Whether or not to store the phases rather than placing them onto the graph, defaults to False + + Warning: + If ``store`` is True, the resulting graph will not represent the same graph as the original graph. Only once all phases have been placed will the graph be equal. + """ + s = PhaseTeleporter(g) + s.teleport_phases(store = store) + g.replace(s.simplify_graph) + + +def selective_simp( + g: BaseGraph[VT,ET], + get_matches: Callable[..., List[MatchObject]], + match_score: Callable[..., Optional[float]], + update_matches: Callable[..., Dict[MatchObject, float]], + rewrite: Callable[[BaseGraph[VT, ET],List[MatchObject]], RewriteOutputType[ET, VT]], + matchf: Union[Optional[Callable[[VT], bool]], Optional[Callable[[ET], bool]]] = None, + condition: Callable[..., bool] = lambda *args: True, + num: int = -1, + **kwargs: Any + ) -> int: + """Helper method for constructing simplification strategies in which each match is assigned a score, and + the highest score is iteratively chosen, updating the matches each time a rewrite has been performed. 
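# Brief sketch of the reworked teleport_reduce, which now modifies the graph in
# place. The store=True variant and place_tracked_phases() follow the docstring
# above and the tests added to tests/test_simplify.py below; the generated
# circuit is illustrative.
import pyzx as zx
from pyzx.simplify import teleport_reduce

c = zx.generate.CNOT_HAD_PHASE_circuit(qubits=4, depth=50)
g = c.to_graph()
teleport_reduce(g)                # phases moved/merged, graph structure unchanged

g2 = c.to_graph()
teleport_reduce(g2, store=True)   # keep the phase placement flexible for later passes
# ... further simplification (e.g. flow_2Q_simp) ...
g2.place_tracked_phases()         # commit the tracked phases onto the graph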
+ + :param g: The graph to be simplified + :param get_matches: A match function which takes ``g``, ``matchf`` and any ``**kwargs`` as inputs, + and returns a list of matches (MatchObjects). + :param heuristic: A heuristic function which takes in the graph, a MatchObject and any **kwargs, and outputs the score (a float) + of that match, and None if the match should be ignored. + :param update_matches: Function which updates the dictionary of matches. Should take as inputs: + - The graph before the rewrite was applied + - The graph after the rewrite was applied + - The dictionary containing the current matches + - The ``get_matches`` function + - The ``heuristic`` function + - ``matchf`` + - Any ``**kwargs`` + The function should then return the updated dictionary of matches. + :param rewrite: The rewrite function which accepts a match and performs the rewrite on the graph. + This is passed into ::func::`~pyzx.rules.apply_rule` + :param matchf: An optional filtering function for candidate vertices or edges. + If provided should return False if a candidate should not be considered for a match. Defaults to None. + :param condition: A function which accepts the graph after a rewrite has been applied as well as the associated ``MatchObject``. + Should return True if the condition is fufilled, defaults to a lambda function which always returns True. + :param num: The maximum number of successful rewrites to perform, defaults to -1. + :return: The number of successful rewrites performed. + """ + num_rewrites = 0 + matches = {} + for match in get_matches(g, matchf, **kwargs): + score = match_score(g=g, match=match, **kwargs) + if score is None: continue + matches[match] = score + while matches and num_rewrites != num: + match = max(matches, key=matches.__getitem__) + check_g = g.clone() + apply_rule(check_g, rewrite, [match]) + if condition(check_g, match): + num_rewrites += 1 + matches = update_matches(g, check_g, matches, get_matches, match_score, matchf, **kwargs) + g.replace(check_g) + continue + del matches[match] + return num_rewrites + +def flow_2Q_simp( + g: BaseGraph[VT, ET], + matchf: Union[Optional[Callable[[VT], bool]], Optional[Callable[[ET], bool]]] = None, + cFlow: bool = True, + rewrites: List[str] = ['id_fuse','lcomp','pivot'], + score_weights: List[float] = [1,1,1], + max_lc_unfusions: int = 2, + max_p_unfusions: int = 2 + ) -> int: + """Simplification strategy which aims to minimise the number of two qubit gates in the extracted circuit by selecting matches based on the heuristic |edges removed| - |vertices removed|. + See https://arxiv.org/abs/2312.02793 for details. + + :param g: The graph to be simplified, for optimal performance should be put into graph-like form prior using ::func::`to_graph_like`. + :param matchf: An optional filtering function for candidate vertices and edges. + If provided should return False if a candidate should not be considered for a match. Defaults to None. + :param cFlow: Whether the existence of a causal flow should be preserved throughout simplification, defaults to True + :param rewrites: Which rewrites to apply, defaults to ['id_fuse','lcomp','pivot'] + :param score_weights: Weighting factor for each of the three rewrites, defaults to [1,1,1] + :param max_lc_unfusions: Maximum number of neighbours to unfuse for local complementation, defaults to 2 + :param max_p_unfusions: Maximum number of neighbours to unfuse on each vertex for pivoting, defaults to 2 + :return: The number of succeessful rewrites performed. 
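# End-to-end sketch of the two-qubit-gate optimisation pipeline, closely following
# test_flow_opt added to tests/test_simplify.py below; extract_simple comes from
# pyzx.extract as in the updated tests, and the generated circuit is illustrative.
import pyzx as zx
from pyzx.simplify import teleport_reduce, to_graph_like, flow_2Q_simp
from pyzx.extract import extract_simple

c = zx.generate.CNOT_HAD_PHASE_circuit(qubits=5, depth=100)
g = c.to_graph()
teleport_reduce(g)
to_graph_like(g, assert_bound_connections=False)
flow_2Q_simp(g, cFlow=True, rewrites=['id_fuse', 'lcomp', 'pivot'],
             max_lc_unfusions=2, max_p_unfusions=2)
c2 = extract_simple(g, up_to_perm=False).to_basic_gates()
assert c.verify_equality(c2)   # semantics are preserved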
+ """ + g.vertices_to_update = [] + + if cFlow: + flow_condition = lambda graph, match: True if match[2] else cflow(graph) is not None + else: + def flow_condition(graph, match): + if match[0] and len(match[0][2]) != 0: + return gflow(graph) is not None + if match[1] and (len(match[1][2][0]) != 0 or len(match[1][2][1]) != 0): + return gflow(graph) is not None + return True + + return selective_simp(g, match_2Q_simp, match_score_2Q_simp, update_2Q_simp_matches, rewrite_2Q_simp, matchf, flow_condition, rewrites=rewrites, score_weights=score_weights, max_lc_unfusions=max_lc_unfusions, max_p_unfusions=max_p_unfusions) #type:ignore + +def match_score_2Q_simp(g: BaseGraph[VT, ET], match: MatchUnfuseType, score_weights: List[float] = [1,1,1], **kwargs) -> Optional[float]: + """Function which returns the score for a ``UnfuseMatchType``.""" + if match[0]: return lcomp_2Q_simp_heuristic(g, match[0], score_weights[0]) + if match[1]: return pivot_2Q_simp_heuristic(g, match[1], score_weights[1]) + if match[2]: return id_fuse_2Q_reduce_heuristic(g, match[2], score_weights[2]) + +def update_2Q_simp_matches( + g_before: BaseGraph[VT, ET], + g_after: BaseGraph[VT, ET], + current_matches: Dict[MatchUnfuseType, float], + get_matches: Callable[..., List[MatchUnfuseType]], + match_score: Callable[..., Optional[float]], + matchf: Union[Optional[Callable[[VT], bool]], Optional[Callable[[ET], bool]]] = None, + **kwargs: Any + ) -> Dict[MatchUnfuseType, float]: + """Function which updates matches for ::func::`flow_2Q_simp`, rechecking any candidate vertices and edges which have been effected by the rewrite performed""" + before_vertices = set(g_before.vertices()) + after_vertices, after_edges = set(g_after.vertices()), set(g_after.edges()) + + removed_vertices = before_vertices - after_vertices + changed_vertices = {v for v in after_vertices if v in g_after.vertices_to_update or g_after.neighbors(v) != g_before.graph.get(v, {}).keys()} # type: ignore + + vertices_to_update = changed_vertices | {n for v in changed_vertices for n in g_after.neighbors(v)} + edges_to_update = {(v1,v2) for v1 in vertices_to_update for v2 in vertices_to_update if v1 < v2 and (v1, v2) in after_edges} + + matches_to_update = vertices_to_update | edges_to_update + update_matchf = lambda candidate: candidate in matches_to_update and (not matchf or matchf(candidate)) + new_matches = get_matches(g_after, update_matchf, **kwargs) + + updated_matches_dict = {} + for m, score in current_matches.items(): + if m[0] and m[0][0] in vertices_to_update | removed_vertices: continue + elif m[1] and (all(v in vertices_to_update for v in [m[1][0],m[1][1]]) or any(v in removed_vertices for v in [m[1][0],m[1][1]])): continue + elif m[2] and m[2][0] in vertices_to_update | removed_vertices: continue + updated_matches_dict[m] = score + + for m in new_matches: + m_score = match_score(g=g_after, match=m, **kwargs) + if m_score is None: continue + updated_matches_dict[m] = m_score + + g_after.vertices_to_update = [] + return updated_matches_dict + + +def to_gh(g: BaseGraph[VT,ET], quiet: bool = True) -> None: """Turns every red node into a green node by changing regular edges into hadamard edges""" ty = g.types() for v in g.vertices(): @@ -280,7 +451,7 @@ def to_gh(g: BaseGraph[VT,ET],quiet:bool=True) -> None: et = g.edge_type(e) g.set_edge_type(e, toggle_edge(et)) -def to_rg(g: BaseGraph[VT,ET], select:Optional[Callable[[VT],bool]]=None) -> None: +def to_rg(g: BaseGraph[VT,ET], select: Optional[Callable[[VT], bool]] = None) -> None: """Turn green nodes into red 
nodes by color-changing vertices which satisfy the predicate ``select``. By default, the predicate is set to greedily reducing the number of Hadamard-edges. :param g: A ZX-graph. @@ -298,18 +469,72 @@ def to_rg(g: BaseGraph[VT,ET], select:Optional[Callable[[VT],bool]]=None) -> Non for e in g.incident_edges(v): g.set_edge_type(e, toggle_edge(g.edge_type(e))) +def to_graph_like(g: BaseGraph[VT,ET], assert_bound_connections: bool = True) -> None: + """Puts a ZX-diagram in graph-like form. + The graph should contain no hboxes, only hadamard edges. Convert arity-2 hboxes to hadamard edges using ``hsimplify.from_hypergraph_form(g)``. + If ``assert_bound_connections`` is False, the conditions on inputs/output connections are not enforced.""" + + if any(g.type(v) == VertexType.H_BOX for v in g.vertices()): + raise ValueError("Graph contains hboxes. Try converting arity-2 hboxes to hadamard edges first by running hsimplify.from_hypergraph_form(g).") + + to_gh(g) + spider_simp(g, quiet=True) + + if not assert_bound_connections: return + + for b in [v for v in g.vertices() if g.type(v) == VertexType.BOUNDARY]: + for n in list(g.neighbors(b)): + if g.edge_type(g.edge(b,n)) == EdgeType.HADAMARD: + z = g.add_vertex(ty=VertexType.Z,row=0.5*g.row(n)+0.5*g.row(b),qubit=0.5*g.qubit(n)+0.5*g.qubit(b)) + g.add_edge(g.edge(b,z), edgetype=EdgeType.SIMPLE) + g.add_edge(g.edge(z,n), edgetype=EdgeType.HADAMARD) + g.remove_edge(g.edge(b,n)) + elif g.type(n) == VertexType.BOUNDARY: + z1 = g.add_vertex(ty=VertexType.Z,row=0.25*g.row(n)+0.75*g.row(b),qubit=0.25*g.qubit(n)+0.75*g.qubit(b)) + z2 = g.add_vertex(ty=VertexType.Z,row=0.5*g.row(n)+0.5*g.row(b),qubit=0.5*g.qubit(n)+0.5*g.qubit(b)) + z3 = g.add_vertex(ty=VertexType.Z,row=0.75*g.row(n)+0.25*g.row(b),qubit=0.75*g.qubit(n)+0.25*g.qubit(b)) + g.add_edge(g.edge(b,z1), edgetype=EdgeType.SIMPLE) + g.add_edge(g.edge(z1,z2), edgetype=EdgeType.HADAMARD) + g.add_edge(g.edge(z2,z3), edgetype=EdgeType.HADAMARD) + g.add_edge(g.edge(z3,n), edgetype=EdgeType.SIMPLE) + g.remove_edge(g.edge(b,n)) + + for v in [v for v in g.vertices() if g.type(v) == VertexType.Z]: + boundary_ns = [n for n in g.neighbors(v) if g.type(n)==VertexType.BOUNDARY] + if len(boundary_ns) <= 1: continue + for b in boundary_ns[:-1]: + z1 = g.add_vertex(ty=VertexType.Z,row=0.3*g.row(v)+0.7*g.row(b),qubit=0.3*g.qubit(v)+0.7*g.qubit(b)) + z2 = g.add_vertex(ty=VertexType.Z,row=0.7*g.row(v)+0.3*g.row(b),qubit=0.7*g.qubit(v)+0.3*g.qubit(b)) + g.add_edge(g.edge(b,z1), edgetype=EdgeType.SIMPLE) + g.add_edge(g.edge(z1,z2), edgetype=EdgeType.HADAMARD) + g.add_edge(g.edge(z2,v), edgetype=EdgeType.HADAMARD) + g.remove_edge(g.edge(b,v)) + + assert(is_graph_like(g)) + +def is_graph_like(g, assert_bound_connections: bool = True): + """Returns True if a ZX-diagram is graph-like. 
+ If ``assert_bound_connections`` is False, the conditions on inputs/output connections are not enforced.""" + for v in g.vertices(): + if g.type(v) not in [VertexType.Z, VertexType.BOUNDARY]: return False + if assert_bound_connections and g.type(v) == VertexType.Z and len([n for n in g.neighbors(v) if g.type(n)==VertexType.BOUNDARY]) > 1: return False + + for e in g.edges(): + if not assert_bound_connections: + if g.type(e[0]) == g.type(e[1]) == VertexType.Z and g.edge_type(e) != EdgeType.HADAMARD: return False + elif g.edge_type(e) == EdgeType.SIMPLE and g.type(e[0]) == g.type(e[1]): return False + elif g.edge_type(e) == EdgeType.HADAMARD and (g.type(e[0]) != VertexType.Z or g.type(e[1]) != VertexType.Z): return False + + return True + def tcount(g: Union[BaseGraph[VT,ET], Circuit]) -> int: """Returns the amount of nodes in g that have a non-Clifford phase.""" - if isinstance(g, Circuit): - return g.tcount() - count = 0 + if isinstance(g, Circuit): return g.tcount() phases = g.phases() - for v in g.vertices(): - if phases[v]!=0 and phases[v].denominator > 2: - count += 1 - return count + return len([v for v in g.vertices() if phases[v] != 0 and phases[v].denominator > 2]) -#The functions below haven't been updated in a while. Use at your own risk. + +# THE FUNCTIONS BELOW HAVEN'T BEEN UPDATED IN A WHILE. USE AT YOUR OWN RISK. def simp_iter( g: BaseGraph[VT,ET], @@ -404,111 +629,6 @@ def full_reduce_iter(g: BaseGraph[VT,ET]) -> Iterator[Tuple[BaseGraph[VT,ET],str ok = True yield g, f"pivot_gadget -> {step}" -def is_graph_like(g: BaseGraph[VT,ET]) -> bool: - """Checks if a ZX-diagram is graph-like.""" - - # checks that all spiders are Z-spiders - for v in g.vertices(): - if g.type(v) not in [VertexType.Z, VertexType.BOUNDARY]: - return False - - for v1, v2 in itertools.combinations(g.vertices(), 2): - if not g.connected(v1, v2): - continue - - # Z-spiders are only connected via Hadamard edges - if g.type(v1) == VertexType.Z and g.type(v2) == VertexType.Z \ - and g.edge_type(g.edge(v1, v2)) != EdgeType.HADAMARD: - return False - - # FIXME: no parallel edges - - # no self-loops - for v in g.vertices(): - if g.connected(v, v): - return False - - # every I/O is connected to a Z-spider - bs = [v for v in g.vertices() if g.type(v) == VertexType.BOUNDARY] - for b in bs: - if g.vertex_degree(b) != 1 or g.type(list(g.neighbors(b))[0]) != VertexType.Z: - return False - - # every Z-spider is connected to at most one I/O - zs = [v for v in g.vertices() if g.type(v) == VertexType.Z] - for z in zs: - b_neighbors = [n for n in g.neighbors(z) if g.type(n) == VertexType.BOUNDARY] - if len(b_neighbors) > 1: - return False - - return True - - -def to_graph_like(g: BaseGraph[VT,ET]) -> None: - """Puts a ZX-diagram in graph-like form.""" - - # turn all red spiders into green spiders - to_gh(g) - - # simplify: remove excess HAD's, fuse along non-HAD edges, remove parallel edges and self-loops - spider_simp(g, quiet=True) - - # ensure all I/O are connected to a Z-spider - bs = [v for v in g.vertices() if g.type(v) == VertexType.BOUNDARY] - for v in bs: - - # if it's already connected to a Z-spider, continue on - if any([g.type(n) == VertexType.Z for n in g.neighbors(v)]): - continue - - # have to connect the (boundary) vertex to a Z-spider - ns = list(g.neighbors(v)) - for n in ns: - # every neighbor is another boundary or an H-Box - assert(g.type(n) in [VertexType.BOUNDARY, VertexType.H_BOX]) - if g.type(n) == VertexType.BOUNDARY: - z1 = g.add_vertex(ty=VertexType.Z) - z2 = g.add_vertex(ty=VertexType.Z) - z3 = 
g.add_vertex(ty=VertexType.Z) - g.remove_edge(g.edge(v, n)) - g.add_edge(g.edge(v, z1), edgetype=EdgeType.SIMPLE) - g.add_edge(g.edge(z1, z2), edgetype=EdgeType.HADAMARD) - g.add_edge(g.edge(z2, z3), edgetype=EdgeType.HADAMARD) - g.add_edge(g.edge(z3, n), edgetype=EdgeType.SIMPLE) - else: # g.type(n) == VertexType.H_BOX - z = g.add_vertex(ty=VertexType.Z) - g.remove_edge(g.edge(v, n)) - g.add_edge(g.edge(v, z), edgetype=EdgeType.SIMPLE) - g.add_edge(g.edge(z, n), edgetype=EdgeType.SIMPLE) - - # each Z-spider can only be connected to at most 1 I/O - vs = list(g.vertices()) - for v in vs: - if not g.type(v) == VertexType.Z: - continue - boundary_ns = [n for n in g.neighbors(v) if g.type(n) == VertexType.BOUNDARY] - if len(boundary_ns) <= 1: - continue - - # add dummy spiders for all but one - for b in boundary_ns[:-1]: - e = g.edge(v,b) - if g.edge_type(e) == EdgeType.SIMPLE: - z1 = g.add_vertex(ty=VertexType.Z,row=0.3*g.row(v)+0.7*g.row(b),qubit=0.3*g.qubit(v)+0.7*g.qubit(b)) - z2 = g.add_vertex(ty=VertexType.Z,row=0.7*g.row(v)+0.3*g.row(b),qubit=0.7*g.qubit(v)+0.3*g.qubit(b)) - - g.remove_edge(e) - g.add_edge(g.edge(z1, z2), edgetype=EdgeType.HADAMARD) - g.add_edge(g.edge(b, z1), edgetype=EdgeType.SIMPLE) - g.add_edge(g.edge(z2, v), edgetype=EdgeType.HADAMARD) - elif g.edge_type(e) == EdgeType.HADAMARD: - z = g.add_vertex(ty=VertexType.Z,row=0.5*g.row(v)+0.5*g.row(b),qubit=0.5*g.qubit(v)+0.5*g.qubit(b)) - g.remove_edge(e) - g.add_edge(g.edge(b,z),EdgeType.SIMPLE) - g.add_edge(g.edge(z,v),EdgeType.HADAMARD) - - assert(is_graph_like(g)) - def to_clifford_normal_form_graph(g: BaseGraph[VT,ET]) -> None: """Converts a graph that is Clifford into the form described by the right-hand side of eq. (11) of *Graph-theoretic Simplification of Quantum Circuits with the ZX-calculus* (https://arxiv.org/abs/1902.03178). 
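# Hedged sketch of the rewritten to_graph_like/is_graph_like above. With
# assert_bound_connections=False the input/output conditions are not enforced,
# which is the mode the flow_2Q_simp tests below rely on; the generated graph is
# illustrative.
import pyzx as zx
from pyzx.simplify import to_graph_like, is_graph_like

g = zx.generate.cliffordT(3, 15)
to_graph_like(g)                                   # also normalises boundary connections
assert is_graph_like(g)
to_graph_like(g, assert_bound_connections=False)   # lighter variant
assert is_graph_like(g, assert_bound_connections=False)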
diff --git a/demos/CNOT-Benchmark.ipynb b/scratchpads/CNOT-Benchmark.ipynb similarity index 100% rename from demos/CNOT-Benchmark.ipynb rename to scratchpads/CNOT-Benchmark.ipynb diff --git a/demos/T-count Benchmark.ipynb b/scratchpads/T-count Benchmark.ipynb similarity index 100% rename from demos/T-count Benchmark.ipynb rename to scratchpads/T-count Benchmark.ipynb diff --git a/tests/long_test.py b/tests/long_test.py index a3b10b6b..d8b9e3b5 100644 --- a/tests/long_test.py +++ b/tests/long_test.py @@ -77,7 +77,7 @@ def do_tests(qubits, depth, iterations, test_clifford_graph=True): #to_gh(g) #id_simp(g,quiet=True) #spider_simp(g,quiet=True) - g = teleport_reduce(g) + teleport_reduce(g) steps.append("teleport_reduce") compare(t,g, False) #c1 = zx.Circuit.from_graph(g,split_phases=True).to_basic_gates() diff --git a/tests/test_extract.py b/tests/test_extract.py index a3c267a3..402e7a38 100644 --- a/tests/test_extract.py +++ b/tests/test_extract.py @@ -18,21 +18,27 @@ import unittest import random import sys +import os +import json if __name__ == '__main__': sys.path.append('..') sys.path.append('.') +mydir = os.path.dirname(__file__) + try: import numpy as np from pyzx.tensor import tensorfy, compare_tensors except ImportError: np = None +from pyzx.graph import Graph from pyzx.circuit import Circuit from pyzx.circuit.gates import CNOT from pyzx.generate import cliffordT, cliffords from pyzx.simplify import clifford_simp -from pyzx.extract import extract_circuit +from pyzx.extract import extract_circuit, extract_simple +from pyzx.flow import cflow, full_cflow SEED = 1337 @@ -68,6 +74,15 @@ def test_cz_optimize_extract(self): self.assertTrue(cnot_count==4) self.assertTrue(c.verify_equality(c2)) + def test_extract_simple_phase_poly_synth(self): + with open(os.path.join(mydir,"test_phase_gadget_graph.json"), 'r') as file: + g_json = json.load(file) + g = Graph.from_json(g_json) + self.assertTrue(cflow(g) is None) + self.assertTrue(full_cflow(g) is not None) + c = extract_circuit(g.copy()) + c2 = extract_simple(g.copy(), synth_phase_polys=True) + self.assertTrue(c.verify_equality(c2)) if __name__ == '__main__': diff --git a/tests/test_phase_gadget_graph.json b/tests/test_phase_gadget_graph.json new file mode 100644 index 00000000..71c97cdf --- /dev/null +++ b/tests/test_phase_gadget_graph.json @@ -0,0 +1 @@ +"{\"wire_vertices\": {\"b0\": {\"annotation\": {\"boundary\": true, \"coord\": [-1, 0], \"input\": 0}}, \"b1\": {\"annotation\": {\"boundary\": true, \"coord\": [-1, -1], \"input\": 1}}, \"b2\": {\"annotation\": {\"boundary\": true, \"coord\": [-1, -2], \"input\": 2}}, \"b3\": {\"annotation\": {\"boundary\": true, \"coord\": [-1, -3], \"input\": 3}}, \"b4\": {\"annotation\": {\"boundary\": true, \"coord\": [6, 0], \"output\": 0}}, \"b5\": {\"annotation\": {\"boundary\": true, \"coord\": [6, -1], \"output\": 1}}, \"b6\": {\"annotation\": {\"boundary\": true, \"coord\": [6, -2], \"output\": 2}}, \"b7\": {\"annotation\": {\"boundary\": true, \"coord\": [6, -3], \"output\": 3}}}, \"node_vertices\": {\"v0\": {\"annotation\": {\"coord\": [0.5, 1]}, \"data\": {\"type\": \"Z\"}}, \"v1\": {\"annotation\": {\"coord\": [0.5, 2]}, \"data\": {\"type\": \"Z\", \"value\": \"\\u03c0\"}}, \"v2\": {\"annotation\": {\"coord\": [1.5, 1]}, \"data\": {\"type\": \"Z\"}}, \"v3\": {\"annotation\": {\"coord\": [1.5, 2]}, \"data\": {\"type\": \"Z\", \"value\": \"\\u03c0\"}}, \"v4\": {\"annotation\": {\"coord\": [1, 0]}, \"data\": {\"type\": \"Z\"}}, \"v5\": {\"annotation\": {\"coord\": [2.5, 1]}, \"data\": {\"type\": 
\"Z\"}}, \"v6\": {\"annotation\": {\"coord\": [2.5, 2]}, \"data\": {\"type\": \"Z\", \"value\": \"\\u03c0\"}}, \"v7\": {\"annotation\": {\"coord\": [2, -3]}, \"data\": {\"type\": \"Z\"}}, \"v8\": {\"annotation\": {\"coord\": [3.5, 1]}, \"data\": {\"type\": \"Z\"}}, \"v9\": {\"annotation\": {\"coord\": [3.5, 2]}, \"data\": {\"type\": \"Z\", \"value\": \"\\u03c0\"}}, \"v10\": {\"annotation\": {\"coord\": [3, -1]}, \"data\": {\"type\": \"Z\"}}, \"v11\": {\"annotation\": {\"coord\": [4.5, 1]}, \"data\": {\"type\": \"Z\"}}, \"v12\": {\"annotation\": {\"coord\": [4.5, 2]}, \"data\": {\"type\": \"Z\", \"value\": \"\\u03c0\"}}, \"v13\": {\"annotation\": {\"coord\": [5.5, 1]}, \"data\": {\"type\": \"Z\"}}, \"v14\": {\"annotation\": {\"coord\": [5.5, 2]}, \"data\": {\"type\": \"Z\", \"value\": \"\\u03c0\"}}, \"v15\": {\"annotation\": {\"coord\": [5, -2]}, \"data\": {\"type\": \"Z\"}}, \"v16\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v17\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v18\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v19\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v20\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v21\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v22\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v23\": {\"annotation\": {\"coord\": [-1, 1]}, \"data\": {\"type\": \"Z\"}}, \"v24\": {\"annotation\": {\"coord\": [0.5, 1.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v25\": {\"annotation\": {\"coord\": [1.75, 0.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v26\": {\"annotation\": {\"coord\": [2.75, -0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v27\": {\"annotation\": {\"coord\": [1.5, 1.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v28\": {\"annotation\": {\"coord\": [1.25, 0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v29\": {\"annotation\": {\"coord\": [1.75, 0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v30\": {\"annotation\": {\"coord\": [2.25, 0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v31\": {\"annotation\": {\"coord\": [2.75, 0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v32\": {\"annotation\": {\"coord\": [3.25, 0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v33\": {\"annotation\": {\"coord\": [0.0, 0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v34\": {\"annotation\": {\"coord\": [2.5, 1.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v35\": {\"annotation\": {\"coord\": [2.25, -1.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v36\": {\"annotation\": {\"coord\": [2.75, -1.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v37\": {\"annotation\": {\"coord\": [0.5, -1.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v38\": {\"annotation\": {\"coord\": [3.5, 1.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v39\": {\"annotation\": {\"coord\": [3.25, 0.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v40\": {\"annotation\": {\"coord\": [3.75, 0.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v41\": {\"annotation\": {\"coord\": [4.25, 0.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v42\": {\"annotation\": 
{\"coord\": [1.0, 0.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v43\": {\"annotation\": {\"coord\": [4.5, 1.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v44\": {\"annotation\": {\"coord\": [5.5, 1.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v45\": {\"annotation\": {\"coord\": [5.25, -0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v46\": {\"annotation\": {\"coord\": [2.0, -0.5]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v47\": {\"annotation\": {\"coord\": [-1.0, 1.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v48\": {\"annotation\": {\"coord\": [-1.0, 1.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v49\": {\"annotation\": {\"coord\": [-1.0, 1.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}, \"v50\": {\"annotation\": {\"coord\": [-1.0, 1.0]}, \"data\": {\"type\": \"hadamard\", \"is_edge\": \"true\"}}}, \"undir_edges\": {\"e0\": {\"src\": \"b0\", \"tgt\": \"v16\"}, \"e1\": {\"src\": \"b1\", \"tgt\": \"v20\"}, \"e2\": {\"src\": \"b2\", \"tgt\": \"v15\"}, \"e3\": {\"src\": \"b3\", \"tgt\": \"v18\"}, \"e4\": {\"src\": \"v0\", \"tgt\": \"v24\"}, \"e5\": {\"src\": \"v1\", \"tgt\": \"v24\"}, \"e6\": {\"src\": \"v0\", \"tgt\": \"v25\"}, \"e7\": {\"src\": \"v10\", \"tgt\": \"v25\"}, \"e8\": {\"src\": \"v0\", \"tgt\": \"v26\"}, \"e9\": {\"src\": \"v15\", \"tgt\": \"v26\"}, \"e10\": {\"src\": \"v2\", \"tgt\": \"v27\"}, \"e11\": {\"src\": \"v3\", \"tgt\": \"v27\"}, \"e12\": {\"src\": \"v2\", \"tgt\": \"v28\"}, \"e13\": {\"src\": \"v4\", \"tgt\": \"v28\"}, \"e14\": {\"src\": \"v4\", \"tgt\": \"v29\"}, \"e15\": {\"src\": \"v5\", \"tgt\": \"v29\"}, \"e16\": {\"src\": \"v4\", \"tgt\": \"v30\"}, \"e17\": {\"src\": \"v8\", \"tgt\": \"v30\"}, \"e18\": {\"src\": \"v4\", \"tgt\": \"b4\"}, \"e19\": {\"src\": \"v4\", \"tgt\": \"v31\"}, \"e20\": {\"src\": \"v11\", \"tgt\": \"v31\"}, \"e21\": {\"src\": \"v4\", \"tgt\": \"v32\"}, \"e22\": {\"src\": \"v13\", \"tgt\": \"v32\"}, \"e23\": {\"src\": \"v4\", \"tgt\": \"v33\"}, \"e24\": {\"src\": \"v17\", \"tgt\": \"v33\"}, \"e25\": {\"src\": \"v5\", \"tgt\": \"v34\"}, \"e26\": {\"src\": \"v6\", \"tgt\": \"v34\"}, \"e27\": {\"src\": \"v5\", \"tgt\": \"v35\"}, \"e28\": {\"src\": \"v7\", \"tgt\": \"v35\"}, \"e29\": {\"src\": \"v7\", \"tgt\": \"b7\"}, \"e30\": {\"src\": \"v7\", \"tgt\": \"v36\"}, \"e31\": {\"src\": \"v8\", \"tgt\": \"v36\"}, \"e32\": {\"src\": \"v7\", \"tgt\": \"v37\"}, \"e33\": {\"src\": \"v19\", \"tgt\": \"v37\"}, \"e34\": {\"src\": \"v8\", \"tgt\": \"v38\"}, \"e35\": {\"src\": \"v9\", \"tgt\": \"v38\"}, \"e36\": {\"src\": \"v8\", \"tgt\": \"v39\"}, \"e37\": {\"src\": \"v10\", \"tgt\": \"v39\"}, \"e38\": {\"src\": \"v10\", \"tgt\": \"v40\"}, \"e39\": {\"src\": \"v11\", \"tgt\": \"v40\"}, \"e40\": {\"src\": \"v10\", \"tgt\": \"b5\"}, \"e41\": {\"src\": \"v10\", \"tgt\": \"v41\"}, \"e42\": {\"src\": \"v13\", \"tgt\": \"v41\"}, \"e43\": {\"src\": \"v10\", \"tgt\": \"v42\"}, \"e44\": {\"src\": \"v21\", \"tgt\": \"v42\"}, \"e45\": {\"src\": \"v11\", \"tgt\": \"v43\"}, \"e46\": {\"src\": \"v12\", \"tgt\": \"v43\"}, \"e47\": {\"src\": \"v13\", \"tgt\": \"v44\"}, \"e48\": {\"src\": \"v14\", \"tgt\": \"v44\"}, \"e49\": {\"src\": \"v13\", \"tgt\": \"v45\"}, \"e50\": {\"src\": \"v15\", \"tgt\": \"v45\"}, \"e51\": {\"src\": \"v15\", \"tgt\": \"v46\"}, \"e52\": {\"src\": \"v23\", \"tgt\": \"v46\"}, \"e53\": {\"src\": \"b6\", \"tgt\": \"v22\"}, \"e54\": {\"src\": \"v16\", \"tgt\": \"v47\"}, \"e55\": 
{\"src\": \"v17\", \"tgt\": \"v47\"}, \"e56\": {\"src\": \"v18\", \"tgt\": \"v48\"}, \"e57\": {\"src\": \"v19\", \"tgt\": \"v48\"}, \"e58\": {\"src\": \"v20\", \"tgt\": \"v49\"}, \"e59\": {\"src\": \"v21\", \"tgt\": \"v49\"}, \"e60\": {\"src\": \"v22\", \"tgt\": \"v50\"}, \"e61\": {\"src\": \"v23\", \"tgt\": \"v50\"}}, \"variable_types\": {}, \"scalar\": \"{\\\"power2\\\": 0, \\\"phase\\\": \\\"0\\\"}\"}" \ No newline at end of file diff --git a/tests/test_simplify.py b/tests/test_simplify.py index e11c8052..8cfe710f 100644 --- a/tests/test_simplify.py +++ b/tests/test_simplify.py @@ -35,6 +35,7 @@ from pyzx.generate import cliffordT from pyzx.simplify import * from pyzx.simplify import supplementarity_simp +from pyzx.extract import extract_simple SEED = 1337 @@ -66,6 +67,9 @@ def test_spider_simp(self): def test_id_simp(self): self.func_test(id_simp) + + def test_id_fuse_simp(self): + self.func_test(id_fuse_simp) def test_to_gh(self): self.func_test(to_gh) @@ -96,11 +100,51 @@ def test_supplementarity_simp(self): def test_teleport_reduce(self): """Tests whether teleport_reduce preserves semantics on a set of circuits that have been broken before.""" + for i,s in enumerate([qasm_1,qasm_2,qasm_3,qasm_4]): + with self.subTest(i=i): + c = qasm(s) + g = c.to_graph() + teleport_reduce(g) + c2 = Circuit.from_graph(g) + self.assertTrue(c.verify_equality(c2)) + + def test_teleport_reduce_phase_storing(self): + """Tests whether teleport_reduce preserves semantics with phases being stored then randomly placed.""" + for i,s in enumerate([qasm_1,qasm_2,qasm_3,qasm_4]): + with self.subTest(i=i): + c = qasm(s) + g = c.to_graph() + teleport_reduce(g, store=True) + for group, vertices in list(g.group_data.items()): + v = random.choice(list(vertices)) # Choose a random vertex to place the phase on + phase = g.phase_sum[group] * g.phase_mult[v] + child_v = g.leaf_vertex(v) + g.add_to_phase(child_v, phase) + c2 = Circuit.from_graph(g) + self.assertTrue(c.verify_equality(c2)) + + def test_flow_opt(self): + """Tests whether flow_2Q_simp preserves semantics.""" + for i,g in enumerate(self.circuits): + with self.subTest(i=i): + c = Circuit.from_graph(g) + teleport_reduce(g) + to_graph_like(g, assert_bound_connections=False) + flow_2Q_simp(g, cFlow=True, rewrites=['id_fuse','lcomp','pivot'], max_lc_unfusions=2, max_p_unfusions=2) + c2 = extract_simple(g, up_to_perm=False).to_basic_gates() + self.assertTrue(c.verify_equality(c2)) + + def test_flow_opt_with_phase_jumping(self): + """Tests whether flow_2Q_simp preserves semantics when phase jumping is allowed.""" for i,s in enumerate([qasm_1,qasm_2,qasm_3,qasm_4]): with self.subTest(i=i): c = qasm(s) g = c.to_graph() - c2 = Circuit.from_graph(teleport_reduce(g)) + teleport_reduce(g, store=True) + to_graph_like(g, assert_bound_connections=False) + flow_2Q_simp(g, cFlow=True, rewrites=['id_fuse','lcomp','pivot'], max_lc_unfusions=2, max_p_unfusions=2) + g.place_tracked_phases() + c2 = extract_simple(g, up_to_perm=False).to_basic_gates() self.assertTrue(c.verify_equality(c2)) def test_to_graph_like_introduce_boundary_vertices(self):