From 8f06cf926a45e3fcd89ca00e1de09f96e683e0d8 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Thu, 4 Jul 2024 19:01:18 +0900 Subject: [PATCH 01/13] Remove standalone and qobj --- .github/workflows/build.yml | 109 ----- CMakeLists.txt | 147 +------ CONTRIBUTING.md | 118 +----- contrib/standalone/qasm_simulator.cpp | 203 --------- contrib/standalone/version.hpp.in | 17 - qiskit_aer/__init__.py | 2 +- qiskit_aer/backends/aer_compiler.py | 6 +- qiskit_aer/backends/aer_simulator.py | 35 +- qiskit_aer/backends/aerbackend.py | 236 +---------- qiskit_aer/backends/backend_utils.py | 80 ++-- qiskit_aer/backends/qasm_simulator.py | 41 +- qiskit_aer/backends/statevector_simulator.py | 53 +-- qiskit_aer/backends/unitary_simulator.py | 59 +-- .../wrappers/aer_controller_binding.hpp | 8 - qiskit_aer/jobs/__init__.py | 3 - qiskit_aer/jobs/aerjob.py | 45 +- qiskit_aer/jobs/utils.py | 134 ------ qiskit_aer/noise/noise_model.py | 2 +- ...move-standalone-qobj-0fc0f7ca479634f4.yaml | 5 + src/controllers/aer_controller.hpp | 54 +-- src/controllers/controller_execute.hpp | 6 - src/controllers/state_controller.hpp | 1 - src/framework/operations.hpp | 2 +- src/framework/pybind_json.hpp | 10 +- src/framework/qobj.hpp | 201 --------- test/CMakeLists.txt | 25 -- test/__init__.py | 15 - test/asv.linux.conf.json | 191 --------- test/asv.linux.cuda.conf.json | 191 --------- test/data/qobj_snapshot_expval_matrix.json | 57 --- test/data/qobj_snapshot_expval_pauli.json | 48 --- test/data/qobj_snapshot_probs.json | 24 -- test/data/qobj_snapshot_statevector.json | 21 - test/src/test_linalg.cpp | 401 ------------------ test/src/test_snapshot.cpp | 44 -- test/src/test_snapshot_bdd.cpp | 49 --- test/src/utils.hpp | 128 ------ .../backends/aer_simulator/test_circuit.py | 23 - test/terra/backends/simulator_test_case.py | 18 +- ..._qobj.py => test_parameterized_circuit.py} | 85 +--- .../backends/test_runtime_parameterization.py | 83 ---- test/terra/extensions/test_wrappers.py | 25 -- tools/generate_qobj.py | 76 ---- 43 files changed, 84 insertions(+), 2997 deletions(-) delete mode 100644 contrib/standalone/qasm_simulator.cpp delete mode 100644 contrib/standalone/version.hpp.in create mode 100644 releasenotes/notes/remove-standalone-qobj-0fc0f7ca479634f4.yaml delete mode 100644 src/framework/qobj.hpp delete mode 100644 test/CMakeLists.txt delete mode 100644 test/__init__.py delete mode 100644 test/asv.linux.conf.json delete mode 100644 test/asv.linux.cuda.conf.json delete mode 100644 test/data/qobj_snapshot_expval_matrix.json delete mode 100644 test/data/qobj_snapshot_expval_pauli.json delete mode 100644 test/data/qobj_snapshot_probs.json delete mode 100644 test/data/qobj_snapshot_statevector.json delete mode 100644 test/src/test_linalg.cpp delete mode 100644 test/src/test_snapshot.cpp delete mode 100644 test/src/test_snapshot_bdd.cpp delete mode 100644 test/src/utils.hpp rename test/terra/backends/{test_parameterized_qobj.py => test_parameterized_circuit.py} (82%) delete mode 100755 tools/generate_qobj.py diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 40c9cb4e5f..f68d120eb8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,117 +8,8 @@ concurrency: group: ${{ github.repository }}-${{ github.ref }}-${{ github.head_ref }}-${{ github.workflow }} cancel-in-progress: true jobs: - standalone: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: ["macos-13", "ubuntu-latest", "windows-2019"] - steps: - - uses: actions/checkout@v4 - - name: Set up Python '3.10' - uses: 
actions/setup-python@v5 - with: - python-version: '3.10' - - name: Install deps - run: pip install "conan<2.0.0" - - name: Install openblas - run: | - set -e - sudo apt-get update - sudo apt-get install -y libopenblas-dev - shell: bash - if: runner.os == 'Linux' - - name: Add msbuild to PATH - uses: microsoft/setup-msbuild@v2 - if: runner.os == 'Windows' - - name: Compile Standalone Windows - run: | - set -e - mkdir out; cd out; cmake .. -DBUILD_TESTS=1 - cmake --build . --config Release - shell: bash - if: runner.os == 'Windows' - - name: Compile Standalone - run: | - set -e - mkdir out; cd out; cmake .. -DBUILD_TESTS=1 - make - shell: bash - if: runner.os != 'Windows' - - name: Run Unit Tests - run: | - cd out/bin - for test in test* - do echo $test - if ! ./$test - then - ERR=1 - fi - done - if [ ! -z "$ERR" ] - then - exit 1 - fi - shell: bash - - name: Run qobj - run: | - pip install -U qiskit - python tools/generate_qobj.py - cd out - Release/qasm_simulator ../qobj.json | python ../tools/verify_standalone_results.py - shell: bash - mpi_standalone: - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: ["ubuntu-latest"] - steps: - - uses: actions/checkout@v4 - - name: Set up Python '3.10' - uses: actions/setup-python@v5 - with: - python-version: '3.10' - - name: Install deps - run: pip install "conan<2.0.0" - - name: Install openblas and mpi - run: | - set -e - sudo apt-get update - sudo apt-get install -y libopenblas-dev openmpi-bin libopenmpi-dev - shell: bash - - name: Compile Standalone - run: | - set -e - mkdir out; cd out; cmake .. -DBUILD_TESTS=1 -DAER_MPI=True - make - shell: bash - - name: Run Unit Tests with mpi - run: | - cd out/bin - for test in test* - do echo $test - if ! /usr/bin/mpirun.openmpi -host localhost:2 -np 2 ./$test - then - ERR=1 - fi - done - if [ ! 
-z "$ERR" ] - then - exit 1 - fi - shell: bash - - name: Run qobj - run: | - pip install -U qiskit - python tools/generate_qobj.py - cd out - /usr/bin/mpirun.openmpi -host localhost:2 -np 2 Release/qasm_simulator ../qobj.json | python ../tools/verify_standalone_results.py - env: - USE_MPI: 1 - shell: bash wheel: runs-on: ${{ matrix.os }} - needs: ["standalone"] strategy: matrix: os: ["macos-13", "ubuntu-latest", "windows-2019"] diff --git a/CMakeLists.txt b/CMakeLists.txt index 424185d7cc..b2b754b5d6 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -95,8 +95,6 @@ include(dependency_utils) # Get version information get_version(${VERSION_NUM}) -configure_file("${PROJECT_SOURCE_DIR}/contrib/standalone/version.hpp.in" - "${PROJECT_SOURCE_DIR}/contrib/standalone/version.hpp") set(AER_SIMULATOR_CPP_SRC_DIR "${PROJECT_SOURCE_DIR}/src") set(AER_SIMULATOR_CPP_EXTERNAL_LIBS @@ -543,151 +541,8 @@ set(AER_LIBRARIES ${CMAKE_DL_LIBS}) set(AER_COMPILER_DEFINITIONS ${AER_COMPILER_DEFINITIONS} ${CONAN_DEFINES}) -if(SKBUILD) # Terra Addon build - add_subdirectory(qiskit_aer/backends/wrappers) -else() # Standalone build - set(AER_LIBRARIES - ${AER_LIBRARIES} - ${THRUST_DEPENDANT_LIBS} - ${MPI_DEPENDANT_LIBS}) - - function(build_cuda target src_file is_exec) - if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "AMD64" OR CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL "amd64") - if (NOT CMAKE_OSX_ARCHITECTURES STREQUAL "arm64") - # We build SIMD filed separately, because they will be reached only if the - # machine running the code has SIMD support - set(SIMD_SOURCE_FILE "${PROJECT_SOURCE_DIR}/src/simulators/statevector/qv_avx2.cpp") - endif() - endif() - set_source_files_properties(${SIMD_SOURCE_FILE} PROPERTIES LANGUAGE CUDA) - set_source_files_properties(${src_file} PROPERTIES LANGUAGE CUDA) - set_source_files_properties(${src_file} PROPERTIES COMPILE_FLAGS "${CUDA_NVCC_FLAGS}") - if(DEFINED SIMD_FLAGS_LIST) - nvcc_add_compiler_options_list("${SIMD_FLAGS_LIST}" SIMD_FLAGS) - set_source_files_properties(${SIMD_SOURCE_FILE} PROPERTIES COMPILE_FLAGS "${CUDA_NVCC_FLAGS} ${SIMD_FLAGS}") - endif() - if(${is_exec}) - add_executable(${target} ${src_file} ${SIMD_SOURCE_FILE}) - else() - add_library(${target} ${src_file} ${SIMD_SOURCE_FILE}) - endif() - target_link_libraries(${target} ${AER_LIBRARIES}) - string(STRIP ${AER_COMPILER_FLAGS} AER_COMPILER_FLAGS_STRIPPED) - nvcc_add_compiler_options(${AER_COMPILER_FLAGS_STRIPPED} AER_COMPILER_FLAGS_OUT) - - set_target_properties(${target} PROPERTIES - LINKER_LANGUAGE CXX - CXX_STANDARD 14 - COMPILE_FLAGS ${AER_COMPILER_FLAGS_OUT} - LINK_FLAGS ${AER_LINKER_FLAGS} - RUNTIME_OUTPUT_DIRECTORY_DEBUG Debug - RUNTIME_OUTPUT_DIRECTORY_RELEASE Release) - endfunction() - - function(build_rocm target src_file is_exec) - # ROCm is only supported in x86_64 devices so it should be safe to leverage AVX2. 
- set(SIMD_SOURCE_FILE "${PROJECT_SOURCE_DIR}/src/simulators/statevector/qv_avx2.cpp") - - set_source_files_properties( - ${SIMD_SOURCE_FILE} - ${src_file} - PROPERTIES LANGUAGE CXX) - - if(${is_exec}) - add_executable(${target} ${src_file} ${SIMD_SOURCE_FILE}) - else() - add_library(${target} ${src_file} ${SIMD_SOURCE_FILE}) - endif() - - target_compile_options(${target} PRIVATE ${ROCM_EXTRA_FLAGS} ${SIMD_FLAGS_LIST}) - target_compile_definitions(${target} PRIVATE ${ROCM_EXTRA_DEFS} ${AER_COMPILER_DEFINITIONS}) - - target_link_libraries(${target} PRIVATE ${AER_LIBRARIES}) - - set_target_properties(${target} PROPERTIES - LINKER_LANGUAGE CXX - CXX_STANDARD 14 - COMPILE_FLAGS ${AER_COMPILER_FLAGS} - LINK_FLAGS ${AER_LINKER_FLAGS} - RUNTIME_OUTPUT_DIRECTORY_DEBUG Debug - RUNTIME_OUTPUT_DIRECTORY_RELEASE Release) - endfunction() - - function(build_cpu target src_file is_exec) - if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "AMD64" OR CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL "amd64") - if (NOT CMAKE_OSX_ARCHITECTURES STREQUAL "arm64") - # We build SIMD filed separately, because they will be reached only if the - # machine running the code has SIMD support - set(SIMD_SOURCE_FILE "${PROJECT_SOURCE_DIR}/src/simulators/statevector/qv_avx2.cpp") - endif() - endif() - string(REPLACE ";" " " SIMD_FLAGS "${SIMD_FLAGS_LIST}") - set_source_files_properties(${SIMD_SOURCE_FILE} PROPERTIES COMPILE_FLAGS "${SIMD_FLAGS}") - if(${is_exec}) - add_executable(${target} ${src_file} ${SIMD_SOURCE_FILE}) - else() - add_library(${target} SHARED ${src_file} ${SIMD_SOURCE_FILE}) - endif() - target_link_libraries(${target} PRIVATE ${AER_LIBRARIES}) - set_target_properties(${target} PROPERTIES - LINKER_LANGUAGE CXX - CXX_STANDARD 14 - COMPILE_FLAGS ${AER_COMPILER_FLAGS} - LINK_FLAGS ${AER_LINKER_FLAGS} - RUNTIME_OUTPUT_DIRECTORY_DEBUG Debug - RUNTIME_OUTPUT_DIRECTORY_RELEASE Release) - - target_include_directories(${target} - PRIVATE ${AER_SIMULATOR_CPP_SRC_DIR} - PRIVATE ${AER_SIMULATOR_CPP_EXTERNAL_LIBS}) - target_compile_definitions(${target} - PRIVATE ${AER_COMPILER_DEFINITIONS}) - if(WIN32 AND NOT AER_BLAS_LIB_PATH) - add_custom_command(TARGET ${target} POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different - ${BACKEND_REDIST_DEPS} - $) - install(FILES ${BACKEND_REDIST_DEPS} DESTINATION bin) - endif() - endfunction() - - # build qasm_simulator - set(AER_SIMULATOR_SOURCE "${PROJECT_SOURCE_DIR}/contrib/standalone/qasm_simulator.cpp") - set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) - if(CUDA_FOUND AND AER_THRUST_BACKEND STREQUAL "CUDA") - build_cuda(qasm_simulator ${AER_SIMULATOR_SOURCE} TRUE) - elseif(HIP_FOUND AND AER_THRUST_BACKEND STREQUAL "ROCM") - build_rocm(qasm_simulator ${AER_SIMULATOR_SOURCE} TRUE) - else() - build_cpu(qasm_simulator ${AER_SIMULATOR_SOURCE} TRUE) - endif() - - install(TARGETS qasm_simulator DESTINATION bin) - if (CMAKE_SYSTEM_NAME STREQUAL "Linux" OR CMAKE_SYSTEM_NAME STREQUAL "Darwin") - set(AER_RUNTIME_SOURCE "${PROJECT_SOURCE_DIR}/contrib/runtime/aer_runtime.cpp") - if(CUDA_FOUND AND AER_THRUST_BACKEND STREQUAL "CUDA") - build_cuda(aer ${AER_RUNTIME_SOURCE} FALSE) - elseif(HIP_FOUND AND AER_THRUST_BACKEND STREQUAL "ROCM") - build_rocm(aer ${AER_RUNTIME_SOURCE} FALSE) - else() - build_cpu(aer ${AER_RUNTIME_SOURCE} FALSE) - endif() - install(TARGETS aer) - - # Tests - if(BUILD_TESTS AND NOT AER_MPI) - add_executable(test_libaer "${PROJECT_SOURCE_DIR}/test/runtime/runtime_sample.c") - target_include_directories(test_libaer PUBLIC 
"${PROJECT_SOURCE_DIR}/contrib/runtime/") - # AER_LINKER_FLAGS carry eventual OpenMP linking flags. - set_target_properties(test_libaer PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE bin - LINK_FLAGS ${AER_LINKER_FLAGS}) - target_link_libraries(test_libaer PRIVATE ${AER_LIBRARIES}) - target_link_libraries(test_libaer PRIVATE aer) - add_test(NAME aer_runtime_test COMMAND bin/test_libaer) - endif() - endif() - -endif() +add_subdirectory(qiskit_aer/backends/wrappers) # Tests if(BUILD_TESTS) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 514d2c369b..b8dd7c8706 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -292,7 +292,6 @@ repository. This is useful for building from source offline, or to reuse the installed package dependencies. -If we are only building the standalone version and do not want to install all Python requirements you can just install **Conan**: $ pip install conan @@ -352,11 +351,6 @@ Ubuntu #### Build -There are two ways of building `Aer` simulators, depending on your goal: - -1. Build a Python extension that works with Terra. -2. Build a standalone executable. - **Python extension** As any other Python package, we can install from source code by just running: @@ -404,36 +398,6 @@ we install those dependencies outside the regular setuptools *mechanism*. If you of these packages set the environment variable DISABLE_DEPENDENCY_INSTALL (ON or 1). -**Standalone Executable** - -If you want to build a standalone executable, you have to use *CMake* directly. -The preferred way *CMake* is meant to be used, is by setting up an "out of -source" build. So in order to build your standalone executable, you have to follow -these steps: - - qiskit-aer$ mkdir out - qiskit-aer$ cd out - qiskit-aer/out$ cmake .. - qiskit-aer/out$ cmake --build . --config Release -- -j4 - -Once built, you will have your standalone executable into the `Release/` or -`Debug/` directory (depending on the type of building chosen with the `--config` -option): - - qiskit-aer/out$ cd Release - qiskit-aer/out/Release/$ ls - qasm_simulator - - -**Advanced options** - -Because the standalone version of `Aer` doesn't need Python at all, the build system is -based on CMake, just like most of other C++ projects. So to pass all the different -options we have on `Aer` to CMake, we use its native mechanism: - - qiskit-aer/out$ cmake -DCMAKE_CXX_COMPILER=g++-9 -DAER_BLAS_LIB_PATH=/path/to/my/blas .. - - ### macOS #### Dependencies @@ -451,11 +415,6 @@ You further need to have *Xcode Command Line Tools* installed on macOS: #### Build -There are two ways of building `Aer` simulators, depending on your goal: - -1. Build a Python extension that works with Terra; -2. Build a standalone executable. - **Python extension** As any other Python package, we can install from source code by just running: @@ -501,35 +460,6 @@ As we are using *scikit-build* and we need some *Python* dependencies to be pres we install those dependencies outside the regular setuptools *mechanism*. If you want to avoid automatic installation of these packages set the environment variable DISABLE_DEPENDENCY_INSTALL (ON or 1). -**Standalone Executable** - -If you want to build a standalone executable, you have to use **CMake** directly. -The preferred way **CMake** is meant to be used, is by setting up an "out of -source" build. So in order to build your standalone executable, you have to follow -these steps: - - qiskit-aer$ mkdir out - qiskit-aer$ cd out - qiskit-aer/out$ cmake .. - qiskit-aer/out$ cmake --build . 
--config Release -- -j4 - -Once built, you will have your standalone executable into the `Release/` or -`Debug/` directory (depending on the type of building chosen with the `--config` -option): - - qiskit-aer/out$ cd Release - qiskit-aer/out/Release/$ ls - qasm_simulator - -***Advanced options*** - -Because the standalone version of `Aer` doesn't need Python at all, the build system is -based on CMake, just like most of other C++ projects. So to pass all the different -options we have on `Aer` to CMake, we use its native mechanism: - - qiskit-aer/out$ cmake -DCMAKE_CXX_COMPILER=g++-9 -DAER_BLAS_LIB_PATH=/path/to/my/blas .. - - ### Windows @@ -604,34 +534,6 @@ As we are using *scikit-build* and we need some *Python* dependencies to be pres we install those dependencies outside the regular setuptools *mechanism*. If you want to avoid automatic installation of these packages set the environment variable DISABLE_DEPENDENCY_INSTALL (ON or 1). -**Standalone Executable** - -If you want to build a standalone executable, you have to use **CMake** directly. -The preferred way **CMake** is meant to be used, is by setting up an "out of -source" build. So in order to build our standalone executable, you have to follow -these steps: - - (QiskitDevEnv) qiskit-aer> mkdir out - (QiskitDevEnv) qiskit-aer> cd out - (QiskitDevEnv) qiskit-aer\out> cmake .. - (QiskitDevEnv) qiskit-aer\out> cmake --build . --config Release -- -j4 - -Once built, you will have your standalone executable into the `Release/` or -`Debug/` directory (depending on the type of building chosen with the `--config` -option): - - (QiskitDevEnv) qiskit-aer\out> cd Release - (QiskitDevEnv) qiskit-aer\out\Release> dir - qasm_simulator - -***Advanced options*** - -Because the standalone version of `Aer` doesn't need Python at all, the build system is -based on CMake, just like most of other C++ projects. So to pass all the different -options we have on `Aer` to CMake, we use its native mechanism: - - (QiskitDevEnv) qiskit-aer\out> cmake -G "Visual Studio 15 2017" -DAER_BLAS_LIB_PATH=c:\path\to\my\blas .. - ### Building with GPU support @@ -714,21 +616,7 @@ on the `thrust` library. This enables Aer to run on AMD® GPUs, including the AMD® Instinct GPU line based on the CDNA architecture. ROCm® only support linux platforms. -To build the standalone version, the following should be sufficient: - -``` -cmake -G Ninja \ - -DCMAKE_INSTALL_PREFIX= \ - -DSKBUILD=FALSE \ - -DAER_THRUST_BACKEND=ROCM \ - -DAER_MPI= \ - -DAER_ROCM_ARCH= \ - -DCMAKE_BUILD_TYPE=Release \ - -DBUILD_TESTS=True -ninja install -``` -Alternatively, and possibly preferred for most use cases, you can create a Python -wheel file that you can install as part of your Python environemnt: +You can create a Python wheel file that you can install as part of your Python environemnt: ``` cd @@ -1106,10 +994,6 @@ create the wheel file: qiskit-aer$> python ./setup.py bdist_wheel --build-type=Debug -If you want to debug the standalone executable, the parameter changes to: - - qiskit-aer/out$> cmake -DCMAKE_BUILD_TYPE=Debug - There are three different build configurations: `Release`, `Debug`, and `Release with Debug Symbols`, whose parameters are: `Release`, `Debug`, `RelWithDebInfo` respectively. diff --git a/contrib/standalone/qasm_simulator.cpp b/contrib/standalone/qasm_simulator.cpp deleted file mode 100644 index 0067374b8c..0000000000 --- a/contrib/standalone/qasm_simulator.cpp +++ /dev/null @@ -1,203 +0,0 @@ -/** - * This code is part of Qiskit. - * - * (C) Copyright IBM 2018, 2019. 
- * - * This code is licensed under the Apache License, Version 2.0. You may - * obtain a copy of this license in the LICENSE.txt file in the root directory - * of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. - * - * Any modifications or derivative works of this code must retain this - * copyright notice, and modified files need to carry a notice indicating - * that they have been altered from the originals. - */ - -// #define DEBUG // Uncomment for verbose debugging output -#include -#include -#include -#ifdef AER_MPI -#include -#endif - -#include "version.hpp" -// Simulator -#include "controllers/aer_controller.hpp" - -/******************************************************************************* - * - * EXIT CODES: - * - * 0: The Qobj was succesfully executed. - * Returns full result JSON. - * - * 1: Command line invalid or Qobj JSON cannot be loaded. - * Returns JSON: - * {"success": false, "status": "ERROR: Invalid input (error msg)"} - * - * 2: Qobj failed to load or execute. - * Returns JSON: - * {"success": false, "status": "ERROR: Failed to execute qobj (error msg)"} - * - * 3: At least one experiment in Qobj failed to execute successfully. - * Returns parial result JSON with failed experiments returning: - * "{"success": false, "status": "ERROR: error msg"} - * - ******************************************************************************/ - -enum class CmdArguments { SHOW_VERSION, INPUT_CONFIG, INPUT_DATA }; - -inline CmdArguments parse_cmd_options(const std::string &argv) { - if (argv == "-v" || argv == "--version") - return CmdArguments::SHOW_VERSION; - - if (argv == "-c" || argv == "--config") - return CmdArguments::INPUT_CONFIG; - - return CmdArguments::INPUT_DATA; -} - -inline void show_version() { - std::cout << "Qiskit Aer: " << AER_MAJOR_VERSION << "." << AER_MINOR_VERSION - << "." << AER_PATCH_VERSION << "\n"; -} - -inline void failed(const std::string &msg, std::ostream &o = std::cout, - int indent = -1) { - json_t ret; - ret["success"] = false; - ret["status"] = std::string("ERROR: ") + msg; - o << ret.dump(indent) << std::endl; -} - -inline void usage(const std::string &command, std::ostream &out) { - failed("Invalid command line", out); - // Print usage message - std::cerr << "\n\n"; - show_version(); - std::cerr << "\n"; - std::cerr << "Usage: \n"; - std::cerr << command << " [-v] [-c ] \n"; - std::cerr << " -v : Show version\n"; - std::cerr << " -c : Configuration file\n"; - ; - std::cerr << " file : qobj file\n"; -} - -int main(int argc, char **argv) { - - std::ostream &out = std::cout; // output stream - int indent = 4; - json_t qobj; - json_t config; - int myrank = 0; - - std::cerr << "The standalone simulator is deprecated as of Qiskit 0.14" - << std::endl; - std::cerr << "and will be removed no sooner than 3 months from that release." 
- << std::endl; - std::cerr << "Please run the simulator from Python using AerSimulator.run()" - << std::endl; - std::cerr << "with qiskit.circuit" << std::endl; - -#ifdef AER_MPI - int prov; - int nprocs = 1; - MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &prov); - MPI_Comm_size(MPI_COMM_WORLD, &nprocs); - MPI_Comm_rank(MPI_COMM_WORLD, &myrank); -#endif - - if (argc == 1) { // NOLINT - usage(std::string(argv[0]), out); // NOLINT - return 1; - } - - // Parse command line options - for (auto pos = 1UL; pos < static_cast(argc); ++pos) { // NOLINT - auto option = parse_cmd_options(std::string(argv[pos])); // NOLINT - switch (option) { - case CmdArguments::SHOW_VERSION: - show_version(); - return 0; - case CmdArguments::INPUT_CONFIG: - if (++pos == static_cast(argc)) { - failed("Invalid config (no file is specified.)", out, indent); - return 1; - } - try { - config = JSON::load(std::string(argv[pos])); - } catch (std::exception &e) { - std::string msg = "Invalid config (" + std::string(e.what()) + ")"; - failed(msg, out, indent); - return 1; - } - break; - case CmdArguments::INPUT_DATA: - try { - qobj = JSON::load(std::string(argv[pos])); // NOLINT - pos = argc; // Exit from the loop - } catch (std::exception &e) { - std::string msg = "Invalid input (" + std::string(e.what()) + ")"; - failed(msg, out, indent); - return 1; - } - break; - } - } - - // Execute simulation - try { - - // Check for command line config - // and if present add to qobj config - json_t &config_all = qobj["config"]; - if (!config.empty()) // NOLINT - config_all.update(config.begin(), config.end()); - - // Remap legacy method names - std::string method; - JSON::get_value(method, "method", config_all); - if (method == "statevector_gpu") { - config_all["method"] = "statevector"; - config_all["device"] = "GPU"; - } else if (method == "density_matrix_gpu") { - config_all["method"] = "density_matrix"; - config_all["device"] = "GPU"; - } - - // Initialize simulator - AER::Controller sim; - auto result = sim.execute(qobj).to_json(); - if (myrank == 0) { - out << result.dump(4) << std::endl; - } - - // Check if execution was successful. - bool success = false; - std::string status; - JSON::get_value(success, "success", result); - JSON::get_value(status, "status", result); - if (!success) { -#ifdef AER_MPI - MPI_Finalize(); -#endif - if (status == "COMPLETED") - return 3; // The simulation was was completed unsuccesfully. - return 2; // Failed to execute the Qobj - } - } catch (std::exception &e) { - std::stringstream msg; - msg << "Failed to execute qobj (" << e.what() << ")"; - failed(msg.str(), out, indent); -#ifdef AER_MPI - MPI_Finalize(); -#endif - return 2; - } -#ifdef AER_MPI - MPI_Finalize(); -#endif - - return 0; -} // end main diff --git a/contrib/standalone/version.hpp.in b/contrib/standalone/version.hpp.in deleted file mode 100644 index 8a583de4d7..0000000000 --- a/contrib/standalone/version.hpp.in +++ /dev/null @@ -1,17 +0,0 @@ -/** - * This code is part of Qiskit. - * - * (C) Copyright IBM 2018, 2019. - * - * This code is licensed under the Apache License, Version 2.0. You may - * obtain a copy of this license in the LICENSE.txt file in the root directory - * of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. - * - * Any modifications or derivative works of this code must retain this - * copyright notice, and modified files need to carry a notice indicating - * that they have been altered from the originals. 
- */ - -#define AER_MAJOR_VERSION ${MAJOR_VERSION} -#define AER_MINOR_VERSION ${MINOR_VERSION} -#define AER_PATCH_VERSION ${PATCH_VERSION} \ No newline at end of file diff --git a/qiskit_aer/__init__.py b/qiskit_aer/__init__.py index ae552a3e40..82d0fd363a 100644 --- a/qiskit_aer/__init__.py +++ b/qiskit_aer/__init__.py @@ -67,7 +67,7 @@ # pylint: disable=wrong-import-position from qiskit_aer.aerprovider import AerProvider -from qiskit_aer.jobs import AerJob, AerJobSet +from qiskit_aer.jobs import AerJob from qiskit_aer.aererror import AerError from qiskit_aer.backends import * from qiskit_aer import library diff --git a/qiskit_aer/backends/aer_compiler.py b/qiskit_aer/backends/aer_compiler.py index f1a8079d24..5803262084 100644 --- a/qiskit_aer/backends/aer_compiler.py +++ b/qiskit_aer/backends/aer_compiler.py @@ -41,8 +41,6 @@ from qiskit.transpiler import PassManager from qiskit.transpiler.passes import Decompose - -from qiskit.qobj import QobjExperimentHeader from qiskit_aer.aererror import AerError from qiskit_aer.noise import NoiseModel @@ -62,7 +60,7 @@ AerConfig, ) -from .backend_utils import circuit_optypes +from .backend_utils import circuit_optypes, CircuitHeader from ..library.control_flow_instructions import AerMark, AerJump, AerStore @@ -680,7 +678,7 @@ def assemble_circuit(circuit: QuantumCircuit, basis_gates=None): for inst in circuit.data ) - header = QobjExperimentHeader( + header = CircuitHeader( n_qubits=num_qubits, qreg_sizes=qreg_sizes, memory_slots=num_memory, diff --git a/qiskit_aer/backends/aer_simulator.py b/qiskit_aer/backends/aer_simulator.py index 096974d234..5c258f1f3a 100644 --- a/qiskit_aer/backends/aer_simulator.py +++ b/qiskit_aer/backends/aer_simulator.py @@ -25,7 +25,6 @@ from .aerbackend import AerBackend, AerError from .backend_utils import ( cpp_execute_circuits, - cpp_execute_qobj, available_methods, available_devices, MAX_QUBITS_STATEVECTOR, @@ -225,7 +224,7 @@ class AerSimulator(AerBackend): maximum will be set to the number of CPU cores (Default: 0). * ``max_parallel_experiments`` (int): Sets the maximum number of - qobj experiments that may be executed in parallel up to the + experiments that may be executed in parallel up to the max_parallel_threads value. If set to 1 parallel circuit execution will be disabled. If set to 0 the maximum will be automatically set to max_parallel_threads (Default: 1). @@ -685,7 +684,7 @@ class AerSimulator(AerBackend): "open_pulse": False, "memory": True, "max_shots": int(1e6), - "description": "A C++ QasmQobj simulator with noise", + "description": "A C++ Qasm simulator with noise", "coupling_map": None, "basis_gates": BASIS_GATES["automatic"], "custom_instructions": _CUSTOM_INSTR["automatic"], @@ -923,17 +922,6 @@ def _execute_circuits(self, aer_circuits, noise_model, config): ret = cpp_execute_circuits(self._controller, aer_circuits, noise_model, config) return ret - def _execute_qobj(self, qobj): - """Execute a qobj on the backend. - - Args: - qobj (QasmQobj): simulator input. - - Returns: - dict: return a dictionary of results. - """ - return cpp_execute_qobj(self._controller, qobj) - def set_option(self, key, value): if key == "custom_instructions": self._set_configuration_option(key, value) @@ -970,25 +958,6 @@ def set_option(self, key, value): if value != "CPU": self.name += f"_{value}".lower() - def _validate(self, qobj): - """Semantic validations of the qobj which cannot be done via schemas. - - Warn if no measure or save instructions in run circuits. 
- """ - for experiment in qobj.experiments: - # If circuit does not contain measurement or save - # instructions raise a warning - no_data = True - for op in experiment.instructions: - if op.name == "measure" or op.name[:5] == "save_": - no_data = False - break - if no_data: - logger.warning( - 'No measure or save instruction in circuit "%s": results will be empty.', - experiment.header.name, - ) - def _basis_gates(self): """Return simualtor basis gates. diff --git a/qiskit_aer/backends/aerbackend.py b/qiskit_aer/backends/aerbackend.py index 968ead84a0..985f4dcce4 100644 --- a/qiskit_aer/backends/aerbackend.py +++ b/qiskit_aer/backends/aerbackend.py @@ -27,11 +27,10 @@ from qiskit.providers import convert_to_target from qiskit.providers.models import BackendStatus from qiskit.pulse import Schedule, ScheduleBlock -from qiskit.qobj import QasmQobj, PulseQobj from qiskit.result import Result from qiskit.transpiler import CouplingMap from ..aererror import AerError -from ..jobs import AerJob, AerJobSet, split_qobj +from ..jobs import AerJob from ..noise.noise_model import NoiseModel, QuantumErrorLocation from ..noise.errors.base_quantum_error import QuantumChannelInstruction from .aer_compiler import compile_circuit, assemble_circuits, generate_aer_config @@ -151,7 +150,7 @@ def run(self, circuits, validate=False, parameter_binds=None, **run_options): Args: circuits (QuantumCircuit or list): The QuantumCircuit (or list of QuantumCircuit objects) to run - validate (bool): validate the Qobj before running (default: False). + validate (bool): validate before running (default: False). parameter_binds (list): A list of parameter binding dictionaries. See additional information (default: None). run_options (kwargs): additional run time backend options. @@ -160,7 +159,7 @@ def run(self, circuits, validate=False, parameter_binds=None, **run_options): AerJob: The simulation job. Raises: - TypeError: If ``parameter_binds`` is specified with a qobj input or + TypeError: If ``parameter_binds`` is specified with an input or has a length mismatch with the number of circuits. Additional Information: @@ -185,67 +184,7 @@ def run(self, circuits, validate=False, parameter_binds=None, **run_options): if isinstance(circuits, (QuantumCircuit, Schedule, ScheduleBlock)): circuits = [circuits] - if isinstance(circuits, (QasmQobj, PulseQobj)): - warnings.warn( - "Using a qobj for run() is deprecated as of qiskit-aer 0.14" - " and will be removed no sooner than 3 months from that release" - " date. Transpiled circuits should now be passed directly using" - " `backend.run(circuits, **run_options).", - DeprecationWarning, - stacklevel=2, - ) - if parameter_binds: - raise TypeError("Parameter binds can't be used with an input qobj") - # A work around to support both qobj options and run options until - # qobj is deprecated is to copy all the set qobj.config fields into - # run_options that don't override existing fields. This means set - # run_options fields will take precidence over the value for those - # fields that are set via assemble. 
- if not run_options: - run_options = circuits.config.__dict__ - else: - run_options = copy.copy(run_options) - for key, value in circuits.config.__dict__.items(): - if key not in run_options and value is not None: - run_options[key] = value - if "parameter_binds" in run_options: - parameter_binds = run_options.pop("parameter_binds") - return self._run_qobj(circuits, validate, parameter_binds, **run_options) - - only_circuits = True - only_pulse = True - for circ in circuits: - only_circuits &= isinstance(circ, QuantumCircuit) - only_pulse &= isinstance(circ, (ScheduleBlock, Schedule)) - - if only_circuits and not only_pulse: - if validate: - raise TypeError( - "bad input to run() function;" - "`validation` argument is only effective for input qobj" - ) - - executor = run_options.get("executor", None) - if executor is None and "executor" in self.options.__dict__: - executor = self.options.__dict__.get("executor", None) - if executor: - # This path remains for DASK execution to split a qobj insttance - # into sub-qobj instances. This will be replaced with _run_circuits path - # in the near releases - return self._run_qobj(circuits, validate, parameter_binds, **run_options) - else: - return self._run_circuits(circuits, parameter_binds, **run_options) - elif not only_circuits and only_pulse: - return self._run_qobj(circuits, validate, parameter_binds, **run_options) - elif not only_circuits and not only_pulse: - raise TypeError( - "bad input to run() function;" - "circuits and schedules cannot be mixed in a single run" - ) - else: - raise TypeError( - "bad input to run() function; circuits must be either circuits or schedules" - ) + return self._run_circuits(circuits, parameter_binds, **run_options) def _run_circuits(self, circuits, parameter_binds, **run_options): """Run circuits by generating native circuits.""" @@ -263,49 +202,6 @@ def _run_circuits(self, circuits, parameter_binds, **run_options): return aer_job - # pylint: disable=arguments-differ - def _run_qobj(self, circuits, validate=False, parameter_binds=None, **run_options): - """Run circuits by assembling qobj.""" - qobj = self._assemble(circuits, parameter_binds=parameter_binds, **run_options) - - # Optional validation - if validate: - self._validate(qobj) - - # Get executor from qobj config and delete attribute so qobj can still be serialized - executor = getattr(qobj.config, "executor", None) - if hasattr(qobj.config, "executor"): - delattr(qobj.config, "executor") - - # Optionally split the job - experiments = split_qobj( - qobj, - max_size=getattr(qobj.config, "max_job_size", None), - max_shot_size=getattr(qobj.config, "max_shot_size", None), - ) - - # Temporarily remove any executor from options so that job submission - # can work with Dask client executors which can't be pickled - opts_executor = getattr(self._options, "executor", None) - if hasattr(self._options, "executor"): - self._options.executor = None - - # Submit job - job_id = str(uuid.uuid4()) - if isinstance(experiments, list): - aer_job = AerJobSet(self, job_id, self._execute_qobj_job, experiments, executor) - else: - aer_job = AerJob( - self, job_id, self._execute_qobj_job, qobj=experiments, executor=executor - ) - aer_job.submit() - - # Restore removed executor after submission - if hasattr(self._options, "executor"): - self._options.executor = opts_executor - - return aer_job - def configuration(self): """Return the simulator backend configuration. 
@@ -376,71 +272,6 @@ def status(self): status_msg="", ) - def _execute_qobj_job(self, qobj, job_id="", format_result=True): - """Run a qobj job""" - # Start timer - start = time.time() - - # Take metadata from headers of experiments to work around JSON serialization error - metadata_list = [] - metadata_index = 0 - for expr in qobj.experiments: - if hasattr(expr.header, "metadata"): - metadata_copy = expr.header.metadata.copy() - metadata_list.append(metadata_copy) - expr.header.metadata.clear() - if "id" in metadata_copy: - expr.header.metadata["id"] = metadata_copy["id"] - expr.header.metadata["metadata_index"] = metadata_index - metadata_index += 1 - - # Run simulation - output = self._execute_qobj(qobj) - - # Recover metadata - metadata_index = 0 - for expr in qobj.experiments: - if hasattr(expr.header, "metadata"): - expr.header.metadata.clear() - expr.header.metadata.update(metadata_list[metadata_index]) - metadata_index += 1 - - # Validate output - if not isinstance(output, dict): - logger.error("%s: simulation failed.", self.name) - if output: - logger.error("Output: %s", output) - raise AerError("simulation terminated without returning valid output.") - - # Format results - output["job_id"] = job_id - output["date"] = datetime.datetime.now().isoformat() - output["backend_name"] = self.name - output["backend_version"] = self.configuration().backend_version - - # Push metadata to experiment headers - for result in output["results"]: - if ( - "header" in result - and "metadata" in result["header"] - and "metadata_index" in result["header"]["metadata"] - ): - metadata_index = result["header"]["metadata"]["metadata_index"] - result["header"]["metadata"] = metadata_list[metadata_index] - - # Add execution time - output["time_taken"] = time.time() - start - - # Display warning if simulation failed - if not output.get("success", False): - msg = "Simulation failed" - if "status" in output: - msg += f" and returned the following error message:\n{output['status']}" - logger.warning(msg) - if format_result: - return self._format_results(output) - return output - def _execute_circuits_job( self, circuits, parameter_binds, run_options, job_id="", format_result=True ): @@ -535,49 +366,6 @@ def _compile(self, circuits, **run_options): return circuits, noise_model - def _assemble(self, circuits, parameter_binds=None, **run_options): - """Assemble one or more Qobj for running on the simulator""" - - if isinstance(circuits, (QasmQobj, PulseQobj)): - qobj = circuits - else: - # compile and insert noise injection points - circuits, noise_model = self._compile(circuits, **run_options) - - # If noise model exists, add it to the run options - if noise_model: - run_options["noise_model"] = noise_model - - if parameter_binds: - # Handle parameter binding - parameterizations = self._convert_binds(circuits, parameter_binds) - qobj = None - for circuit in circuits: - assemble_bind = {param: 1 for param in circuit.parameters} - qobj_tmp = assemble( - [circuit], - backend=self, - parameter_binds=[assemble_bind], - parameterizations=parameterizations, - ) - if qobj: - qobj.experiments.append(qobj_tmp.experiments[0]) - else: - qobj = qobj_tmp - else: - qobj = assemble(circuits, backend=self) - - # Add options - for key, val in self.options.__dict__.items(): - if val is not None: - setattr(qobj.config, key, val) - - # Override with run-time options - for key, val in run_options.items(): - setattr(qobj.config, key, val) - - return qobj - def _assemble_noise_model(self, circuits, optypes, **run_options): """Move 
quantum error instructions from circuits to noise model""" # Make a shallow copy so we can modify list elements if required @@ -651,18 +439,6 @@ def _get_executor(self, **run_options): else: return getattr(self._options, "executor", None) - @abstractmethod - def _execute_qobj(self, qobj): - """Execute a qobj on the backend. - - Args: - qobj (QasmQobj or PulseQobj): simulator input. - - Returns: - dict: return a dictionary of results. - """ - pass - @abstractmethod def _execute_circuits(self, aer_circuits, noise_model, config): """Execute aer circuits on the backend. @@ -677,10 +453,6 @@ def _execute_circuits(self, aer_circuits, noise_model, config): """ pass - def _validate(self, qobj): - """Validate the qobj for the backend""" - pass - def set_option(self, key, value): """Special handling for setting backend options. diff --git a/qiskit_aer/backends/backend_utils.py b/qiskit_aer/backends/backend_utils.py index af66c8c8bd..04dfe6c3be 100644 --- a/qiskit_aer/backends/backend_utils.py +++ b/qiskit_aer/backends/backend_utils.py @@ -17,9 +17,10 @@ import os from math import log2 +from types import SimpleNamespace + import psutil from qiskit.circuit import QuantumCircuit -from qiskit.qobj import QasmQobjInstruction from qiskit.result import ProbDistribution from qiskit.quantum_info import Clifford @@ -436,15 +437,6 @@ ) -def cpp_execute_qobj(controller, qobj): - """Execute qobj on C++ controller wrapper""" - - # Location where we put external libraries that will be - # loaded at runtime by the simulator extension - qobj.config.library_dir = LIBRARY_DIR - return controller(qobj) - - def cpp_execute_circuits(controller, aer_circuits, noise_model, config): """Execute aer circuits on C++ controller wrapper""" @@ -476,25 +468,6 @@ def available_devices(controller): return tuple(dev) -def add_final_save_instruction(qobj, state): - """Add final save state instruction to all experiments in a qobj.""" - - def save_inst(num_qubits): - """Return n-qubit save statevector inst""" - return QasmQobjInstruction( - name=f"save_{state}", - qubits=list(range(num_qubits)), - label=f"{state}", - snapshot_type="single", - ) - - for exp in qobj.experiments: - num_qubits = exp.config.n_qubits - exp.instructions.append(save_inst(num_qubits)) - - return qobj - - def add_final_save_op(aer_circuits, state): """Add final save state op to all experiments in a qobj.""" @@ -505,14 +478,6 @@ def add_final_save_op(aer_circuits, state): return aer_circuits -def map_legacy_method_options(qobj): - """Map legacy method names of qasm simulator to aer simulator options""" - method = getattr(qobj.config, "method", None) - if method in LEGACY_METHOD_MAP: - qobj.config.method, qobj.config.device = LEGACY_METHOD_MAP[method] - return qobj - - def map_legacy_method_config(config): """Map legacy method names of qasm simulator to aer simulator options""" method = config.method @@ -562,3 +527,44 @@ def circuit_optypes(circuit): optypes.update(type(inst).mro()) optypes.discard(object) return optypes + + +class CircuitHeader(SimpleNamespace): + """A class used to represent a dictionary header in circuit objects.""" + + def __init__(self, **kwargs): + """Instantiate a new circuit dict field object. + + Args: + kwargs: arbitrary keyword arguments that can be accessed as + attributes of the object. + """ + self.__dict__.update(kwargs) + + def to_dict(self): + """Return a dictionary format representation of the circuit. + + Returns: + dict: The dictionary form of the CircuitHeader. 
+ """ + return self.__dict__ + + @classmethod + def from_dict(cls, data): + """Create a new header object from a dictionary. + + Args: + data (dict): A dictionary representing the header to create. It + will be in the same format as output by :func:`to_dict`. + + Returns: + CircuitHeader: The CircuitHeader from the input dictionary. + """ + + return cls(**data) + + def __eq__(self, other): + if isinstance(other, self.__class__): + if self.__dict__ == other.__dict__: + return True + return False diff --git a/qiskit_aer/backends/qasm_simulator.py b/qiskit_aer/backends/qasm_simulator.py index 174d201da1..7952d2f1a4 100644 --- a/qiskit_aer/backends/qasm_simulator.py +++ b/qiskit_aer/backends/qasm_simulator.py @@ -24,12 +24,10 @@ from ..aererror import AerError from .aerbackend import AerBackend from .backend_utils import ( - cpp_execute_qobj, cpp_execute_circuits, available_methods, MAX_QUBITS_STATEVECTOR, LEGACY_METHOD_MAP, - map_legacy_method_options, map_legacy_method_config, ) @@ -161,7 +159,7 @@ class QasmSimulator(AerBackend): maximum will be set to the number of CPU cores (Default: 0). * ``max_parallel_experiments`` (int): Sets the maximum number of - qobj experiments that may be executed in parallel up to the + experiments that may be executed in parallel up to the max_parallel_threads value. If set to 1 parallel circuit execution will be disabled. If set to 0 the maximum will be automatically set to max_parallel_threads (Default: 1). @@ -403,7 +401,7 @@ class QasmSimulator(AerBackend): "open_pulse": False, "memory": True, "max_shots": int(1e6), - "description": "A C++ QasmQobj simulator with noise", + "description": "A C++ Qasm simulator with noise", "coupling_map": None, "basis_gates": _DEFAULT_BASIS_GATES, "custom_instructions": _DEFAULT_CUSTOM_INSTR, @@ -580,18 +578,6 @@ def available_devices(self): """Return the available simulation methods.""" return copy.copy(self._AVAILABLE_DEVICES) - def _execute_qobj(self, qobj): - """Execute a qobj on the backend. - - Args: - qobj (QasmQobj): simulator input. - - Returns: - dict: return a dictionary of results. - """ - qobj = map_legacy_method_options(qobj) - return cpp_execute_qobj(self._controller, qobj) - def _execute_circuits(self, aer_circuits, noise_model, config): """Execute circuits on the backend.""" config = map_legacy_method_config(config) @@ -615,29 +601,6 @@ def set_option(self, key, value): if key in ["method", "noise_model", "basis_gates"]: self._cached_basis_gates = self._basis_gates() - def _validate(self, qobj): - """Semantic validations of the qobj which cannot be done via schemas. - - Warn if no measurements in circuit with classical registers. - """ - for experiment in qobj.experiments: - # If circuit contains classical registers but not - # measurements raise a warning - if experiment.config.memory_slots > 0: - # Check if measure opts missing - no_measure = True - for op in experiment.instructions: - if not no_measure: - break # we don't need to check any more ops - if no_measure and op.name == "measure": - no_measure = False - # Print warning if clbits but no measure - if no_measure: - logger.warning( - 'No measurements in circuit "%s": count data will return all zeros.', - experiment.header.name, - ) - def _basis_gates(self): """Return simualtor basis gates. 
diff --git a/qiskit_aer/backends/statevector_simulator.py b/qiskit_aer/backends/statevector_simulator.py index 63fe03cdc3..7bf293cc98 100644 --- a/qiskit_aer/backends/statevector_simulator.py +++ b/qiskit_aer/backends/statevector_simulator.py @@ -25,13 +25,10 @@ from ..version import __version__ from .aerbackend import AerBackend from .backend_utils import ( - cpp_execute_qobj, available_devices, MAX_QUBITS_STATEVECTOR, LEGACY_METHOD_MAP, - add_final_save_instruction, cpp_execute_circuits, - map_legacy_method_options, map_legacy_method_config, add_final_save_op, ) @@ -107,7 +104,7 @@ class StatevectorSimulator(AerBackend): maximum will be set to the number of CPU cores (Default: 0). * ``max_parallel_experiments`` (int): Sets the maximum number of - qobj experiments that may be executed in parallel up to the + experiments that may be executed in parallel up to the max_parallel_threads value. If set to 1 parallel circuit execution will be disabled. If set to 0 the maximum will be automatically set to max_parallel_threads (Default: 1). @@ -327,56 +324,8 @@ def available_devices(self): """Return the available simulation methods.""" return copy.copy(self._AVAILABLE_DEVICES) - def _execute_qobj(self, qobj): - """Execute a qobj on the backend. - - Args: - qobj (QasmQobj): simulator input. - - Returns: - dict: return a dictionary of results. - """ - # Make deepcopy so we don't modify the original qobj - qobj = copy.deepcopy(qobj) - qobj = add_final_save_instruction(qobj, "statevector") - qobj = map_legacy_method_options(qobj) - return cpp_execute_qobj(self._controller, qobj) - def _execute_circuits(self, aer_circuits, noise_model, config): """Execute circuits on the backend.""" config = map_legacy_method_config(config) aer_circuits = add_final_save_op(aer_circuits, "statevector") return cpp_execute_circuits(self._controller, aer_circuits, noise_model, config) - - def _validate(self, qobj): - """Semantic validations of the qobj which cannot be done via schemas. - Some of these may later move to backend schemas. - - 1. Set shots=1. - 2. Check number of qubits will fit in local memory. - """ - name = self.name - if getattr(qobj.config, "noise_model", None) is not None: - raise AerError(f"{name} does not support noise.") - - n_qubits = qobj.config.n_qubits - max_qubits = self.configuration().n_qubits - if n_qubits > max_qubits: - raise AerError( - f"Number of qubits ({n_qubits}) is greater than max ({max_qubits}) " - f'for "{name}" with {int(psutil.virtual_memory().total / (1024**3))} GB system memory.' - ) - - if qobj.config.shots != 1: - logger.info('"%s" only supports 1 shot. Setting shots=1.', name) - qobj.config.shots = 1 - - for experiment in qobj.experiments: - exp_name = experiment.header.name - if getattr(experiment.config, "shots", 1) != 1: - logger.info( - '"%s" only supports 1 shot. 
Setting shots=1 for circuit "%s".', - name, - exp_name, - ) - experiment.config.shots = 1 diff --git a/qiskit_aer/backends/unitary_simulator.py b/qiskit_aer/backends/unitary_simulator.py index ef02150fe9..555af9a581 100644 --- a/qiskit_aer/backends/unitary_simulator.py +++ b/qiskit_aer/backends/unitary_simulator.py @@ -26,13 +26,10 @@ from ..version import __version__ from .aerbackend import AerBackend from .backend_utils import ( - cpp_execute_qobj, cpp_execute_circuits, available_devices, MAX_QUBITS_STATEVECTOR, LEGACY_METHOD_MAP, - add_final_save_instruction, - map_legacy_method_options, add_final_save_op, map_legacy_method_config, ) @@ -94,7 +91,7 @@ class UnitarySimulator(AerBackend): * ``max_shot_size`` (int or None): If the number of shots with a noise model exceeds this value, simulation will split the experiments into - sub experiments in the qobj. If ``None`` simulator does nothing (Default: None). + sub experiments. If ``None`` simulator does nothing (Default: None). * ``"initial_unitary"`` (matrix_like): Sets a custom initial unitary matrix for the simulation instead of identity (Default: None). @@ -111,7 +108,7 @@ class UnitarySimulator(AerBackend): maximum will be set to the number of CPU cores (Default: 0). * ``"max_parallel_experiments"`` (int): Sets the maximum number of - qobj experiments that may be executed in parallel up to the + experiments that may be executed in parallel up to the max_parallel_threads value. If set to 1 parallel circuit execution will be disabled. If set to 0 the maximum will be automatically set to max_parallel_threads (Default: 1). @@ -313,60 +310,8 @@ def available_devices(self): """Return the available simulation methods.""" return copy.copy(self._AVAILABLE_DEVICES) - def _execute_qobj(self, qobj): - """Execute a qobj on the backend. - - Args: - qobj (QasmQobj): simulator input. - - Returns: - dict: return a dictionary of results. - """ - # Make deepcopy so we don't modify the original qobj - qobj = copy.deepcopy(qobj) - qobj = add_final_save_instruction(qobj, "unitary") - qobj = map_legacy_method_options(qobj) - return cpp_execute_qobj(self._controller, qobj) - def _execute_circuits(self, aer_circuits, noise_model, config): """Execute circuits on the backend.""" config = map_legacy_method_config(config) aer_circuits = add_final_save_op(aer_circuits, "unitary") return cpp_execute_circuits(self._controller, aer_circuits, noise_model, config) - - def _validate(self, qobj): - """Semantic validations of the qobj which cannot be done via schemas. - Some of these may later move to backend schemas. - 1. Set shots=1 - 2. No measurements or reset - 3. Check number of qubits will fit in local memory. - """ - name = self.name - if getattr(qobj.config, "noise_model", None) is not None: - raise AerError(f"{name} does not support noise.") - - n_qubits = qobj.config.n_qubits - max_qubits = self.configuration().n_qubits - if n_qubits > max_qubits: - raise AerError( - f"Number of qubits ({n_qubits}) is greater than " - f'max ({max_qubits}) for "{name}" with ' - f"{int(psutil.virtual_memory().total / (1024**3))} GB system memory." - ) - if qobj.config.shots != 1: - logger.info('"%s" only supports 1 shot. Setting shots=1.', name) - qobj.config.shots = 1 - for experiment in qobj.experiments: - exp_name = experiment.header.name - if getattr(experiment.config, "shots", 1) != 1: - logger.info( - '"%s" only supports 1 shot. 
Setting shots=1 for circuit "%s".', - name, - exp_name, - ) - experiment.config.shots = 1 - for operation in experiment.instructions: - if operation.name in ["measure", "reset"]: - raise AerError( - f"Unsupported {name} instruction {operation.name} in circuit {exp_name}" - ) diff --git a/qiskit_aer/backends/wrappers/aer_controller_binding.hpp b/qiskit_aer/backends/wrappers/aer_controller_binding.hpp index 997d38adf7..02a5516f8c 100644 --- a/qiskit_aer/backends/wrappers/aer_controller_binding.hpp +++ b/qiskit_aer/backends/wrappers/aer_controller_binding.hpp @@ -42,13 +42,6 @@ template class ControllerExecutor { public: ControllerExecutor() = default; - py::object operator()(const py::handle &qobj) { -#ifdef TEST_JSON // Convert input qobj to json to test standalone data reading - return AerToPy::to_python(controller_execute(json_t(qobj))); -#else - return AerToPy::to_python(controller_execute(qobj)); -#endif - } py::object execute(std::vector> &circuits, Noise::NoiseModel &noise_model, @@ -91,7 +84,6 @@ void bind_aer_controller(MODULE m) { py::class_> aer_ctrl(m, "aer_controller_execute"); aer_ctrl.def(py::init<>()); - aer_ctrl.def("__call__", &ControllerExecutor::operator()); aer_ctrl.def("__reduce__", [aer_ctrl](const ControllerExecutor &self) { return py::make_tuple(aer_ctrl, py::tuple()); diff --git a/qiskit_aer/jobs/__init__.py b/qiskit_aer/jobs/__init__.py index e929458f40..e7ee51a10b 100644 --- a/qiskit_aer/jobs/__init__.py +++ b/qiskit_aer/jobs/__init__.py @@ -29,10 +29,7 @@ :toctree: ../stubs/ AerJob - AerJobSet """ from .aerjob import AerJob -from .aerjobset import AerJobSet -from .utils import split_qobj diff --git a/qiskit_aer/jobs/aerjob.py b/qiskit_aer/jobs/aerjob.py index 5c5b8243fe..4c662ba8f8 100644 --- a/qiskit_aer/jobs/aerjob.py +++ b/qiskit_aer/jobs/aerjob.py @@ -32,7 +32,6 @@ def __init__( backend, job_id, fn, - qobj=None, circuits=None, parameter_binds=None, run_options=None, @@ -46,13 +45,9 @@ def __init__( fn(function): a callable function to execute qobj on backend. This should usually be a bound :meth:`AerBackend._run()` method, with the signature `(qobj: QasmQobj, job_id: str) -> Result`. - qobj(QasmQobj): qobj to execute circuits(list of QuantumCircuit): circuits to execute. - If `qobj` is set, this argument is ignored. parameter_binds(list): parameters for circuits. - If `qobj` is set, this argument is ignored. run_options(dict): run_options to execute. - If `qobj` is set, this argument is ignored. executor(ThreadPoolExecutor or dask.distributed.client): The executor to be used to submit the job. 
@@ -61,18 +56,9 @@ def __init__( """ super().__init__(backend, job_id) self._fn = fn - if qobj: - self._qobj = qobj - self._circuits = None - self._parameter_binds = None - self._run_options = None - elif circuits: - self._qobj = None - self._circuits = circuits - self._parameter_binds = parameter_binds - self._run_options = run_options - else: - raise JobError("AerJob needs a qobj or circuits") + self._circuits = circuits + self._parameter_binds = parameter_binds + self._run_options = run_options self._executor = executor or DEFAULT_EXECUTOR self._future = None @@ -86,12 +72,9 @@ def submit(self): """ if self._future is not None: raise JobError("Aer job has already been submitted.") - if self._qobj: - self._future = self._executor.submit(self._fn, self._qobj, self._job_id) - else: - self._future = self._executor.submit( - self._fn, self._circuits, self._parameter_binds, self._run_options, self._job_id - ) + self._future = self._executor.submit( + self._fn, self._circuits, self._parameter_binds, self._run_options, self._job_id + ) @requires_submit def result(self, timeout=None): @@ -148,22 +131,6 @@ def backend(self): """Return the instance of the backend used for this job.""" return self._backend - def qobj(self): - """Return the Qobj submitted for this job. - - Returns: - Qobj: the Qobj submitted for this job. - """ - warnings.warn( - "`AerJob.qobj() is deprecated as of qiskit-aer 0.14`. " - "Using a qobj for `backend.run()` is deprecated as of qiskit-aer 0.14" - " and will be removed no sooner than 3 months from that release" - " date. Once it is removed, this `qobj()` returns always `None`.", - DeprecationWarning, - stacklevel=2, - ) - return self._qobj - def circuits(self): """Return the list of QuantumCircuit submitted for this job. diff --git a/qiskit_aer/jobs/utils.py b/qiskit_aer/jobs/utils.py index ad016d1f0c..83a3970391 100644 --- a/qiskit_aer/jobs/utils.py +++ b/qiskit_aer/jobs/utils.py @@ -16,7 +16,6 @@ from math import ceil from functools import singledispatch, update_wrapper, wraps from concurrent.futures import ThreadPoolExecutor -from qiskit.qobj import QasmQobj, PulseQobj from qiskit.providers import JobError @@ -59,69 +58,6 @@ def wrapper(*args, **kw): return wrapper -def _copy_qobj_for_noise(qobj, max_shot_size, qobj_id): - num_shot_jobs, shot_mod = divmod(qobj.config.shots, max_shot_size) - qobj_list = [] - - if shot_mod == 0 and num_shot_jobs == 1: - return qobj - - if shot_mod > 0: - qobj.config.shots = shot_mod - for experiment in qobj.experiments: - _id = str(uuid.uuid4()) - experiment.header.metadata["id"] = _id - qobj_list.append(qobj) - - if num_shot_jobs > 1: - _qid = qobj_id or str(uuid.uuid4()) - _config = copy.copy(qobj.config) - setattr(_config, "shots", max_shot_size) - experiment_list = [] - for experiment in qobj.experiments: - _id = str(uuid.uuid4()) - for _ in range(num_shot_jobs): - cpy_exp = copy.copy(experiment) - cpy_exp.header = copy.copy(experiment.header) - cpy_exp.header.metadata["id"] = _id - experiment_list.append(cpy_exp) - qobj_list.append(QasmQobj(_qid, _config, experiment_list, qobj.header)) - - return qobj_list - - -def _split_qobj(qobj, max_size, qobj_id, seed): - # Check if we don't need to split - if max_size is None or not max_size > 0: - return qobj - - num_jobs = ceil(len(qobj.experiments) / max_size) - if num_jobs == 1: - return qobj - - qobjs = [] - # Check for parameterizations - params = getattr(qobj.config, "parameterizations", None) - - for i in range(num_jobs): - sub_id = qobj_id or str(uuid.uuid4()) - indices = slice(i * 
max_size, (i + 1) * max_size) - sub_exp = qobj.experiments[indices] - sub_config = qobj.config - - if params is not None: - sub_config.parameterizations = params[indices] - sub_config = copy.copy(qobj.config) - - if seed > 0: - if sub_config is qobj.config: - sub_config = copy.copy(qobj.config) - - qobjs.append(type(qobj)(sub_id, sub_config, sub_exp, qobj.header)) - - return qobjs - - def _check_custom_instruction(experiments, optypes=None): """Return True if circuits contain instructions that cant be split""" # Check via optype list if available @@ -131,73 +67,3 @@ def _check_custom_instruction(experiments, optypes=None): # Otherwise iterate over instruction names return any("save_" in inst.name for exp in experiments for inst in exp.instructions) - - -def _set_seed(qobj_list, seed): - # set seed number to each qobj - seed_shift = 256 - - if seed == 0: - return - - for _each_qobj_list in qobj_list: - for _each_qobj in _each_qobj_list: - _each_qobj.config.seed_simulator = seed - seed = seed + seed_shift - - -def split_qobj(qobj, max_size=None, max_shot_size=None, qobj_id=None): - """Split a qobj and return a list of qobjs each with a single experiment. - - Args: - qobj (Qobj): The input qobj object to split - max_size (int or None): the maximum number of circuits per job. If - None don't split (Default: None). - max_shot_size (int or None): the maximum number of shots per job. If - None don't split (Default: None). - qobj_id (str): Optional, set a fixed qobj ID for all subjob qobjs. - - Raises: - JobError : If max_job_size > 1 and seed is set. - JobError : If custom instructions exist. - - Returns: - List: A list of qobjs. - """ - optypes = getattr(qobj.config, "optypes", None) - split_qobj_list = [] - if max_shot_size is not None and max_shot_size > 0: - if _check_custom_instruction(qobj.experiments, optypes): - raise JobError( - "`max_shot_size` option cannot be used with circuits" - " containing save instructions." - ) - - _seed = getattr(qobj.config, "seed_simulator", 0) - if hasattr(qobj.config, "noise_model"): - if _seed and max_size is not None and max_size > 1: - raise JobError( - "cannot support max_job_size > 1 for noise simulation, " - "when seed_simulator is set." 
- ) - - if max_shot_size is not None and max_shot_size > 0: - _qobj = _copy_qobj_for_noise(qobj, max_shot_size, qobj_id) - if isinstance(_qobj, list): - for each_qobj in _qobj: - _split = _split_qobj(each_qobj, max_size, qobj_id, _seed) - if isinstance(_split, QasmQobj): - split_qobj_list.append([_split]) - else: - split_qobj_list.append(_split) - _set_seed(split_qobj_list, _seed) - return split_qobj_list - - _qobj = _split_qobj(qobj, max_size, qobj_id, _seed) - if isinstance(_qobj, (PulseQobj, QasmQobj)): - return _qobj - else: - split_qobj_list.append(_qobj) - - _set_seed(split_qobj_list, _seed) - return split_qobj_list diff --git a/qiskit_aer/noise/noise_model.py b/qiskit_aer/noise/noise_model.py index 6ff1f91760..f8f3d6c15a 100644 --- a/qiskit_aer/noise/noise_model.py +++ b/qiskit_aer/noise/noise_model.py @@ -751,7 +751,7 @@ def add_quantum_error(self, error, instructions, qubits, warnings=True): for name, label in self._instruction_names_labels(instructions): self._check_number_of_qubits(error, name) if not isinstance(label, str): - raise NoiseError("Qobj invalid instructions.") + raise NoiseError("QuantumCircuit invalid instructions.") # Check number of qubits is correct for standard instructions self._check_number_of_qubits(error, name) if label in self._local_quantum_errors: diff --git a/releasenotes/notes/remove-standalone-qobj-0fc0f7ca479634f4.yaml b/releasenotes/notes/remove-standalone-qobj-0fc0f7ca479634f4.yaml new file mode 100644 index 0000000000..20eff1f3ba --- /dev/null +++ b/releasenotes/notes/remove-standalone-qobj-0fc0f7ca479634f4.yaml @@ -0,0 +1,5 @@ +--- +deprecations: + - | + Removed standalone simulators and remove using qobj as input circuits + diff --git a/src/controllers/aer_controller.hpp b/src/controllers/aer_controller.hpp index b3b1e8ec6a..016248f401 100755 --- a/src/controllers/aer_controller.hpp +++ b/src/controllers/aer_controller.hpp @@ -43,7 +43,6 @@ #include "framework/config.hpp" #include "framework/creg.hpp" -#include "framework/qobj.hpp" #include "framework/results/experiment_result.hpp" #include "framework/results/result.hpp" #include "framework/rng.hpp" @@ -77,14 +76,9 @@ class Controller { Controller() {} //----------------------------------------------------------------------- - // Execute qobj + // Execute circuits //----------------------------------------------------------------------- - // Load a QOBJ from a JSON file and execute on the State type - // class. - template - Result execute(const inputdata_t &qobj); - Result execute(std::vector> &circuits, Noise::NoiseModel &noise_model, const Config &config); @@ -457,52 +451,6 @@ std::vector Controller::available_devices() { return ret; } -//------------------------------------------------------------------------- -// Qobj execution -//------------------------------------------------------------------------- -template -Result Controller::execute(const inputdata_t &input_qobj) { - // Load QOBJ in a try block so we can catch parsing errors and still return - // a valid JSON output containing the error message. 
- try { - // Start QOBJ timer - auto timer_start = myclock_t::now(); - - // Initialize QOBJ - Qobj qobj(input_qobj); - auto qobj_time_taken = - std::chrono::duration(myclock_t::now() - timer_start).count(); - - // Set config - set_config(qobj.config); - - // Run qobj circuits - auto result = execute(qobj.circuits, qobj.noise_model, qobj.config); - - // Add QOBJ loading time - result.metadata.add(qobj_time_taken, "time_taken_load_qobj"); - - // Get QOBJ id and pass through header to result - result.qobj_id = qobj.id; - if (!qobj.header.empty()) { - result.header = qobj.header; - } - - // Stop the timer and add total timing data including qobj parsing - auto time_taken = - std::chrono::duration(myclock_t::now() - timer_start).count(); - result.metadata.add(time_taken, "time_taken"); - return result; - } catch (std::exception &e) { - // qobj was invalid, return valid output containing error message - Result result; - - result.status = Result::Status::error; - result.message = std::string("Failed to load qobj: ") + e.what(); - return result; - } -} - //------------------------------------------------------------------------- // Experiment execution //------------------------------------------------------------------------- diff --git a/src/controllers/controller_execute.hpp b/src/controllers/controller_execute.hpp index 14f05d67ba..fc217de84f 100644 --- a/src/controllers/controller_execute.hpp +++ b/src/controllers/controller_execute.hpp @@ -35,12 +35,6 @@ void initialize_libraries(const std::string &lib_dir) { Hacks::maybe_load_openmp(lib_dir); } -template -Result controller_execute(const inputdata_t &qobj) { - controller_t controller; - return controller.execute(qobj); -} - template Result controller_execute(std::vector> &input_circs, AER::Noise::NoiseModel &noise_model, diff --git a/src/controllers/state_controller.hpp b/src/controllers/state_controller.hpp index 7cf4843cb2..99ee9a8861 100644 --- a/src/controllers/state_controller.hpp +++ b/src/controllers/state_controller.hpp @@ -38,7 +38,6 @@ DISABLE_WARNING_POP #include "framework/creg.hpp" #include "framework/linalg/vector.hpp" -#include "framework/qobj.hpp" #include "framework/results/experiment_result.hpp" #include "framework/results/result.hpp" #include "framework/rng.hpp" diff --git a/src/framework/operations.hpp b/src/framework/operations.hpp index c8e0523a59..ba6136c6ef 100644 --- a/src/framework/operations.hpp +++ b/src/framework/operations.hpp @@ -687,7 +687,7 @@ inline std::ostream &operator<<(std::ostream &s, const Op &op) { inline void check_empty_name(const Op &op) { if (op.name.empty()) throw std::invalid_argument( - R"(Invalid qobj instruction ("name" is empty).)"); + R"(Invalid instruction ("name" is empty).)"); } // Raise an exception if qubits list is empty diff --git a/src/framework/pybind_json.hpp b/src/framework/pybind_json.hpp index 108e1b34ec..d1e2fba66e 100644 --- a/src/framework/pybind_json.hpp +++ b/src/framework/pybind_json.hpp @@ -221,10 +221,8 @@ json_t JSON::iterable_to_json_list(const py::handle &obj) { void std::to_json(json_t &js, const py::handle &obj) { static py::object PyNoiseModel = py::module::import("qiskit_aer.noise.noise_model").attr("NoiseModel"); - static py::object PyQasmQobj = - py::module::import("qiskit.qobj.qasm_qobj").attr("QasmQobj"); - static py::object PyQasmQobjHeader = - py::module::import("qiskit.qobj.common").attr("QobjExperimentHeader"); + static py::object PyCircuitHeader = + py::module::import("qiskit_aer.backends.backend_utils").attr("CircuitHeader"); if (py::isinstance(obj)) { 
js = obj.cast(); } else if (py::isinstance(obj)) { @@ -249,9 +247,7 @@ void std::to_json(json_t &js, const py::handle &obj) { return; } else if (py::isinstance(obj, PyNoiseModel)) { std::to_json(js, obj.attr("to_dict")()); - } else if (py::isinstance(obj, PyQasmQobj)) { - std::to_json(js, obj.attr("to_dict")()); - } else if (py::isinstance(obj, PyQasmQobjHeader)) { + } else if (py::isinstance(obj, PyCircuitHeader)) { std::to_json(js, obj.attr("to_dict")()); } else { auto type_str = std::string(py::str(obj.get_type())); diff --git a/src/framework/qobj.hpp b/src/framework/qobj.hpp deleted file mode 100644 index 2a25f8cfe2..0000000000 --- a/src/framework/qobj.hpp +++ /dev/null @@ -1,201 +0,0 @@ -/** - * This code is part of Qiskit. - * - * (C) Copyright IBM 2018, 2019. - * - * This code is licensed under the Apache License, Version 2.0. You may - * obtain a copy of this license in the LICENSE.txt file in the root directory - * of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. - * - * Any modifications or derivative works of this code must retain this - * copyright notice, and modified files need to carry a notice indicating - * that they have been altered from the originals. - */ - -#ifndef _aer_framework_qobj_hpp_ -#define _aer_framework_qobj_hpp_ - -#include -#include -#include -#include - -#include "framework/circuit.hpp" -#include "noise/noise_model.hpp" - -namespace AER { - -//============================================================================ -// Qobj data structure -//============================================================================ - -class Qobj { -public: - //---------------------------------------------------------------- - // Constructors - //---------------------------------------------------------------- - - // Default constructor and destructors - Qobj() = default; - virtual ~Qobj() = default; - - // Deserialization constructor - template - Qobj(const inputdata_t &input); - - //---------------------------------------------------------------- - // Data - //---------------------------------------------------------------- - std::string id; // qobj identifier passed to result - std::string type = "QASM"; // currently we only support QASM - std::vector> circuits; // List of circuits - json_t header; // (optional) passed through to result - json_t config; // (optional) qobj level config data - Noise::NoiseModel noise_model; // (optional) noise model -}; - -//============================================================================ -// JSON initialization and deserialization -//============================================================================ - -// JSON deserialization -inline void from_json(const json_t &js, Qobj &qobj) { qobj = Qobj(js); } - -template -Qobj::Qobj(const inputdata_t &input) { - // Check required fields - if (Parser::get_value(id, "qobj_id", input) == false) { - throw std::invalid_argument(R"(Invalid qobj: no "qobj_id" field)"); - }; - Parser::get_value(type, "type", input); - if (type != "QASM") { - throw std::invalid_argument(R"(Invalid qobj: "type" != "QASM".)"); - }; - if (Parser::check_key("experiments", input) == false) { - throw std::invalid_argument(R"(Invalid qobj: no "experiments" field.)"); - } - - // Apply qubit truncation - bool truncation = true; - - // Parse config - if (Parser::get_value(config, "config", input)) { - // Check for truncation option - Parser::get_value(truncation, "enable_truncation", config); - - // Load noise model - if (Parser::get_value(noise_model, "noise_model", config)) { - // If 
noise model has non-local errors disable trunction - if (noise_model.has_nonlocal_quantum_errors()) { - truncation = false; - } - } - } else { - config = json_t::object(); - } - - // Parse header - if (!Parser::get_value(header, "header", input)) { - header = json_t::object(); - } - - // Check for fixed simulator seed - // If simulator seed is set, each experiment will be set to a fixed (but - // different) seed Otherwise a random seed will be chosen for each experiment - int_t seed = -1; - uint_t seed_shift = 0; - bool has_simulator_seed = Parser::get_value( - seed, "seed_simulator", config); // config always json - const auto &circs = Parser::get_list("experiments", input); - const size_t num_circs = circs.size(); - - // Check if parameterized qobj - // It should be of the form - // [exp0_params, exp1_params, ...] - // where: - // expk_params = [((i, j), pars), ....] - // i is the instruction index in the experiment - // j is the param index in the instruction - // pars = [par0, par1, ...] is a list of different parameterizations - using pos_t = std::pair; - using exp_params_t = std::vector>>; - std::vector param_table; - Parser::get_value(param_table, "parameterizations", config); - - // Validate parameterizations for number of circuis - if (!param_table.empty() && param_table.size() != num_circs) { - throw std::invalid_argument( - R"(Invalid parameterized qobj: "parameterizations" length does not match number of circuits.)"); - } - - // Load circuits - for (size_t i = 0; i < num_circs; i++) { - if (param_table.empty() || param_table[i].empty()) { - // Get base circuit from qobj - auto circuit = std::make_shared( - static_cast(circs[i]), config, truncation); - // Non parameterized circuit - circuits.push_back(circuit); - } else { - // Get base circuit from qobj without truncation - auto circuit = std::make_shared( - static_cast(circs[i]), config, false); - // Load different parameterizations of the initial circuit - const auto circ_params = param_table[i]; - const size_t num_params = circ_params[0].second.size(); - const size_t num_instr = circuit->ops.size(); - for (size_t j = 0; j < num_params; j++) { - // Make a copy of the initial circuit - auto param_circuit = std::make_shared(*circuit); - for (const auto ¶ms : circ_params) { - const auto instr_pos = params.first.first; - const auto param_pos = params.first.second; - // Validation - if (instr_pos == AER::Config::GLOBAL_PHASE_POS) { - // negative position is for global phase - param_circuit->global_phase_angle = params.second[j]; - } else { - if ((uint_t)instr_pos >= num_instr) { - throw std::invalid_argument( - R"(Invalid parameterized qobj: instruction position out of range)"); - } - auto &op = param_circuit->ops[instr_pos]; - if ((uint_t)param_pos >= op.params.size()) { - throw std::invalid_argument( - R"(Invalid parameterized qobj: instruction param position out of range)"); - } - if (j >= params.second.size()) { - throw std::invalid_argument( - R"(Invalid parameterized qobj: parameterization value out of range)"); - } - // Update the param - op.params[param_pos] = params.second[j]; - } - } - // Run truncation. - // TODO: Truncation should be performed and parameters should be - // resolved after it. However, parameters are associated with indices of - // instructions, which can be changed in truncation. Therefore, current - // implementation performs truncation for each parameter set. 
- if (truncation) - param_circuit->set_params(true); - circuits.push_back(param_circuit); - } - } - } - // Override random seed with fixed seed if set - // We shift the seed for each successive experiment - // So that results aren't correlated between experiments - if (!has_simulator_seed) { - seed = circuits[0]->seed; - } - for (auto &circuit : circuits) { - circuit->seed = seed + seed_shift; - seed_shift += 2113; // Shift the seed - } -} - -//------------------------------------------------------------------------------ -} // namespace AER -//------------------------------------------------------------------------------ -#endif diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt deleted file mode 100644 index ea92f6946c..0000000000 --- a/test/CMakeLists.txt +++ /dev/null @@ -1,25 +0,0 @@ - -macro(add_test_executable target_name) - add_executable(${target_name} ${ARGN}) - set_target_properties(${target_name} PROPERTIES - LINKER_LANGUAGE CXX - CXX_STANDARD 14) - target_include_directories(${target_name} - PRIVATE ${AER_SIMULATOR_CPP_SRC_DIR} - PRIVATE ${AER_SIMULATOR_CPP_EXTERNAL_LIBS}) - target_link_libraries(${target_name} - PRIVATE AER_DEPENDENCY_PKG::catch2 - PRIVATE ${AER_LIBRARIES}) - add_test(${target_name} ${target_name}) - if(WIN32 AND NOT BLAS_LIB_PATH) - add_custom_command(TARGET test_linalg POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy_if_different - ${BACKEND_REDIST_DEPS} - $) - endif() -endmacro() - -add_test_executable(test_linalg "src/test_linalg.cpp") - -# Don't forget to add your test target here -add_custom_target(build_tests - test_linalg) diff --git a/test/__init__.py b/test/__init__.py deleted file mode 100644 index a0e6fee90c..0000000000 --- a/test/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# This code is part of Qiskit. -# -# (C) Copyright IBM 2018, 2019. -# -# This code is licensed under the Apache License, Version 2.0. You may -# obtain a copy of this license in the LICENSE.txt file in the root directory -# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. -# -# Any modifications or derivative works of this code must retain this -# copyright notice, and modified files need to carry a notice indicating -# that they have been altered from the originals. - -""" -Aer tests -""" diff --git a/test/asv.linux.conf.json b/test/asv.linux.conf.json deleted file mode 100644 index c5c72ec389..0000000000 --- a/test/asv.linux.conf.json +++ /dev/null @@ -1,191 +0,0 @@ -// To use this configuration for running the benchmarks, we have to run asv like this: -// $ asv --connfig asv.linux.conf.json -{ - // The version of the config file format. Do not change, unless - // you know what you are doing. - "version": 1, - - // The name of the project being benchmarked - "project": "qiskit-aer", - - // The project's homepage - "project_url": "http://qiskit.org/aer", - - // The URL or local path of the source code repository for the - // project being benchmarked - "repo": "../", - - // The Python project's subdirectory in your repo. If missing or - // the empty string, the project is assumed to be located at the root - // of the repository. - // "repo_subdir": "", - - // Customizable commands for building, installing, and - // uninstalling the project. See asv.conf.json documentation. 
- // - // "install_command": ["python -mpip install {wheel_file}"], - // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], - // "build_command": [ - // "python setup.py build", - // "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}" - // ], - - "install_command": [ - "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit', True)\"", - "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_aer.egg-info', True)\"", - "pip install git+https://github.com/Qiskit/qiskit", - "pip install git+https://github.com/Qiskit/qiskit-aqua", - "python -mpip install {wheel_file}" - ], - "uninstall_command": [ - "return-code=any python -mpip uninstall -y qiskit", - "return-code=any python -mpip uninstall -y qiskit-aqua", - "return-code=any python -mpip uninstall -y {project}" - ], - "build_command": [ - "python -mpip install -U scikit-build", - "pip install git+https://github.com/Qiskit/qiskit", - "pip install git+https://github.com/Qiskit/qiskit-aqua", - "pip install pyscf", - "pip install matplotlib", - "python setup.py bdist_wheel --dist-dir={build_cache_dir} -- -DCMAKE_CXX_COMPILER=g++ -- -j" - ], - - // List of branches to benchmark. If not provided, defaults to "main" - // (for git) or "default" (for mercurial). - // "branches": ["main"], // for git - // "branches": ["default"], // for mercurial - - // The DVCS being used. If not set, it will be automatically - // determined from "repo" by looking at the protocol in the URL - // (if remote), or by looking for special directories, such as - // ".git" (if local). - // "dvcs": "git", - - // The tool to use to create environments. May be "conda", - // "virtualenv" or other value depending on the plugins in use. - // If missing or the empty string, the tool will be automatically - // determined by looking for tools on the PATH environment - // variable. - "environment_type": "conda", - - // timeout in seconds for installing any dependencies in environment - // defaults to 10 min - //"install_timeout": 600, - - // the base URL to show a commit for the project. - // "show_commit_url": "http://github.com/owner/project/commit/", - - // The Pythons you'd like to test against. If not provided, defaults - // to the current version of Python used to run `asv`. - // "pythons": ["3.7", "3.8", "3.9"], - - // The list of conda channel names to be searched for benchmark - // dependency packages in the specified order - // "conda_channels": ["conda-forge", "defaults"] - - // The matrix of dependencies to test. Each key is the name of a - // package (in PyPI) and the values are version numbers. An empty - // list or empty string indicates to just test against the default - // (latest) version. null indicates that the package is to not be - // installed. If the package to be tested is only available from - // PyPi, and the 'environment_type' is conda, then you can preface - // the package name by 'pip+', and the package will be installed via - // pip (with all the conda available packages installed first, - // followed by the pip installed packages). - // - // "matrix": { - // "numpy": ["1.6", "1.7"], - // "six": ["", null], // test with and without six installed - // "pip+emcee": [""], // emcee is only available for install with pip. - // }, - - //"matrix": { - // "pip+qiskit": [""], - //}, - - // Combinations of libraries/python versions can be excluded/included - // from the set to test. Each entry is a dictionary containing additional - // key-value pairs to include/exclude. 
- // - // An exclude entry excludes entries where all values match. The - // values are regexps that should match the whole string. - // - // An include entry adds an environment. Only the packages listed - // are installed. The 'python' key is required. The exclude rules - // do not apply to includes. - // - // In addition to package names, the following keys are available: - // - // - python - // Python version, as in the *pythons* variable above. - // - environment_type - // Environment type, as above. - // - sys_platform - // Platform, as in sys.platform. Possible values for the common - // cases: 'linux2', 'win32', 'cygwin', 'darwin'. - // - // "exclude": [ - // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows - // {"environment_type": "conda", "six": null}, // don't run without six on conda - // ], - // - // "include": [ - // // additional env for python2.7 - // {"python": "2.7", "numpy": "1.8"}, - // // additional env if run on windows+conda - // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, - // ], - - // The directory (relative to the current directory) that benchmarks are - // stored in. If not provided, defaults to "benchmarks" - "benchmark_dir": "benchmark", - - - // The directory (relative to the current directory) to cache the Python - // environments in. If not provided, defaults to "env" - // "env_dir": "env", - "env_dir": ".asv/envs", - - // The directory (relative to the current directory) that raw benchmark - // results are stored in. If not provided, defaults to "results". - // "results_dir": "results", - "results_dir": ".asv/results", - - // The directory (relative to the current directory) that the html tree - // should be written to. If not provided, defaults to "html". - // "html_dir": "html", - "html_dir": ".asv/html" - - // The number of characters to retain in the commit hashes. - // "hash_length": 8, - - // `asv` will cache results of the recent builds in each - // environment, making them faster to install next time. This is - // the number of builds to keep, per environment. - // "build_cache_size": 2, - - // The commits after which the regression search in `asv publish` - // should start looking for regressions. Dictionary whose keys are - // regexps matching to benchmark names, and values corresponding to - // the commit (exclusive) after which to start looking for - // regressions. The default is to start from the first commit - // with results. If the commit is `null`, regression detection is - // skipped for the matching benchmark. - // - // "regressions_first_commits": { - // "some_benchmark": "352cdf", // Consider regressions only after this commit - // "another_benchmark": null, // Skip regression detection altogether - // }, - - // The thresholds for relative change in results, after which `asv - // publish` starts reporting regressions. Dictionary of the same - // form as in ``regressions_first_commits``, with values - // indicating the thresholds. If multiple entries match, the - // maximum is taken. If no entry matches, the default is 5%. 
- // - // "regressions_thresholds": { - // "some_benchmark": 0.01, // Threshold of 1% - // "another_benchmark": 0.5, // Threshold of 50% - // }, -} diff --git a/test/asv.linux.cuda.conf.json b/test/asv.linux.cuda.conf.json deleted file mode 100644 index 317c8842c7..0000000000 --- a/test/asv.linux.cuda.conf.json +++ /dev/null @@ -1,191 +0,0 @@ -// To use this configuration for running the benchmarks, we have to run asv like this: -// $ asv --connfig asv.linux.conf.json -{ - // The version of the config file format. Do not change, unless - // you know what you are doing. - "version": 1, - - // The name of the project being benchmarked - "project": "qiskit-aer", - - // The project's homepage - "project_url": "http://qiskit.org/aer", - - // The URL or local path of the source code repository for the - // project being benchmarked - "repo": "../", - - // The Python project's subdirectory in your repo. If missing or - // the empty string, the project is assumed to be located at the root - // of the repository. - // "repo_subdir": "", - - // Customizable commands for building, installing, and - // uninstalling the project. See asv.conf.json documentation. - // - // "install_command": ["python -mpip install {wheel_file}"], - // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], - // "build_command": [ - // "python setup.py build", - // "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}" - // ], - - "install_command": [ - "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit', True)\"", - "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_aer.egg-info', True)\"", - "pip install git+https://github.com/Qiskit/qiskit", - "pip install git+https://github.com/Qiskit/qiskit-aqua", - "python -mpip install {wheel_file}" - ], - "uninstall_command": [ - "return-code=any python -mpip uninstall -y qiskit", - "return-code=any python -mpip uninstall -y qiskit-aqua", - "return-code=any python -mpip uninstall -y {project}" - ], - "build_command": [ - "python -mpip install -U scikit-build", - "pip install git+https://github.com/Qiskit/qiskit", - "pip install git+https://github.com/Qiskit/qiskit-aqua", - "pip install pyscf", - "pip install matplotlib", - "python setup.py bdist_wheel --dist-dir={build_cache_dir} -- -DCMAKE_CXX_COMPILER=g++ -DAER_THRUST_BACKEND=CUDA -- -j" - ], - - // List of branches to benchmark. If not provided, defaults to "main" - // (for git) or "default" (for mercurial). - // "branches": ["main"], // for git - // "branches": ["default"], // for mercurial - - // The DVCS being used. If not set, it will be automatically - // determined from "repo" by looking at the protocol in the URL - // (if remote), or by looking for special directories, such as - // ".git" (if local). - // "dvcs": "git", - - // The tool to use to create environments. May be "conda", - // "virtualenv" or other value depending on the plugins in use. - // If missing or the empty string, the tool will be automatically - // determined by looking for tools on the PATH environment - // variable. - "environment_type": "conda", - - // timeout in seconds for installing any dependencies in environment - // defaults to 10 min - //"install_timeout": 600, - - // the base URL to show a commit for the project. - // "show_commit_url": "http://github.com/owner/project/commit/", - - // The Pythons you'd like to test against. If not provided, defaults - // to the current version of Python used to run `asv`. 
- // "pythons": ["2.7", "3.6"], - - // The list of conda channel names to be searched for benchmark - // dependency packages in the specified order - // "conda_channels": ["conda-forge", "defaults"] - - // The matrix of dependencies to test. Each key is the name of a - // package (in PyPI) and the values are version numbers. An empty - // list or empty string indicates to just test against the default - // (latest) version. null indicates that the package is to not be - // installed. If the package to be tested is only available from - // PyPi, and the 'environment_type' is conda, then you can preface - // the package name by 'pip+', and the package will be installed via - // pip (with all the conda available packages installed first, - // followed by the pip installed packages). - // - // "matrix": { - // "numpy": ["1.6", "1.7"], - // "six": ["", null], // test with and without six installed - // "pip+emcee": [""], // emcee is only available for install with pip. - // }, - - //"matrix": { - // "pip+qiskit": [""], - //}, - - // Combinations of libraries/python versions can be excluded/included - // from the set to test. Each entry is a dictionary containing additional - // key-value pairs to include/exclude. - // - // An exclude entry excludes entries where all values match. The - // values are regexps that should match the whole string. - // - // An include entry adds an environment. Only the packages listed - // are installed. The 'python' key is required. The exclude rules - // do not apply to includes. - // - // In addition to package names, the following keys are available: - // - // - python - // Python version, as in the *pythons* variable above. - // - environment_type - // Environment type, as above. - // - sys_platform - // Platform, as in sys.platform. Possible values for the common - // cases: 'linux2', 'win32', 'cygwin', 'darwin'. - // - // "exclude": [ - // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows - // {"environment_type": "conda", "six": null}, // don't run without six on conda - // ], - // - // "include": [ - // // additional env for python2.7 - // {"python": "2.7", "numpy": "1.8"}, - // // additional env if run on windows+conda - // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, - // ], - - // The directory (relative to the current directory) that benchmarks are - // stored in. If not provided, defaults to "benchmarks" - "benchmark_dir": "benchmark", - - - // The directory (relative to the current directory) to cache the Python - // environments in. If not provided, defaults to "env" - // "env_dir": "env", - "env_dir": ".asv/envs", - - // The directory (relative to the current directory) that raw benchmark - // results are stored in. If not provided, defaults to "results". - // "results_dir": "results", - "results_dir": ".asv/results", - - // The directory (relative to the current directory) that the html tree - // should be written to. If not provided, defaults to "html". - // "html_dir": "html", - "html_dir": ".asv/html" - - // The number of characters to retain in the commit hashes. - // "hash_length": 8, - - // `asv` will cache results of the recent builds in each - // environment, making them faster to install next time. This is - // the number of builds to keep, per environment. - // "build_cache_size": 2, - - // The commits after which the regression search in `asv publish` - // should start looking for regressions. 
Dictionary whose keys are - // regexps matching to benchmark names, and values corresponding to - // the commit (exclusive) after which to start looking for - // regressions. The default is to start from the first commit - // with results. If the commit is `null`, regression detection is - // skipped for the matching benchmark. - // - // "regressions_first_commits": { - // "some_benchmark": "352cdf", // Consider regressions only after this commit - // "another_benchmark": null, // Skip regression detection altogether - // }, - - // The thresholds for relative change in results, after which `asv - // publish` starts reporting regressions. Dictionary of the same - // form as in ``regressions_first_commits``, with values - // indicating the thresholds. If multiple entries match, the - // maximum is taken. If no entry matches, the default is 5%. - // - // "regressions_thresholds": { - // "some_benchmark": 0.01, // Threshold of 1% - // "another_benchmark": 0.5, // Threshold of 50% - // }, -} diff --git a/test/data/qobj_snapshot_expval_matrix.json b/test/data/qobj_snapshot_expval_matrix.json deleted file mode 100644 index 0b93c534f7..0000000000 --- a/test/data/qobj_snapshot_expval_matrix.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "qobj_id": "matrix_observable_snapshot_example", - "schema_version": "1.0.0", - "type": "QASM", - "experiments": [ - { - "config": { - "shots": 1000, - "memory_slots": 2, - "n_qubits": 2 - }, - "instructions": [ - {"name": "h", "qubits": [0]}, - {"name": "cx", "qubits": [0, 1]}, - { - "name": "snapshot", - "type": "expectation_value_matrix", - "label": "pre_measure", - "params": [ - [1, [[[0], [[1, 0], [0, -1]]], - [[1], [[1, 0], [0, -1]]]] - ] - ] - }, - { - "name": "snapshot", - "type": "expectation_value_matrix", - "label": "pre_measure", - "params": [ - [1, [[[0], [[1, 0], [0, -1]]]]], - [1, [[[1], [[1, 0], [0, -1]]]]] - ] - }, - {"name": "measure", "qubits": [0, 1], "memory": [0, 1]}, - { - "name": "snapshot", - "type": "expectation_value_matrix", - "label": "post_measure", - "params": [ - [1, [[[0], [[1, 0], [0, -1]]], - [[1], [[1, 0], [0, -1]]]] - ] - ] - }, - { - "name": "snapshot", - "type": "expectation_value_matrix", - "label": "post_measure", - "params": [ - [1, [[[0], [[1, 0], [0, -1]]]]], - [1, [[[1], [[1, 0], [0, -1]]]]] - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/test/data/qobj_snapshot_expval_pauli.json b/test/data/qobj_snapshot_expval_pauli.json deleted file mode 100644 index ef0c1fb5f5..0000000000 --- a/test/data/qobj_snapshot_expval_pauli.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "qobj_id": "pauli_observable_snapshot_example", - "schema_version": "1.0.0", - "type": "QASM", - "experiments": [ - { - "config": { - "shots": 1000, - "memory_slots": 2, - "n_qubits": 2 - }, - "instructions": [ - {"name": "h", "qubits": [0]}, - {"name": "cx", "qubits": [0, 1]}, - { - "name": "snapshot", - "type": "expectation_value_pauli", - "label": "pre_measure", - "qubits": [0, 1], - "params": [[[1, 0] , "ZZ"]] - }, - { - "name": "snapshot", - "type": "expectation_value_pauli", - "label": "pre_measure", - "qubits": [0, 1], - "params": [[[1, 0] , "ZI"], [[1, 0], "IZ"]] - }, - {"name": "measure", "qubits": [0], "memory": [0]}, - {"name": "measure", "qubits": [0], "memory": [1]}, - { - "name": "snapshot", - "type": "expectation_value_pauli", - "label": "post_measure", - "qubits": [0, 1], - "params": [[[1, 0] , "ZI"], [[1, 0], "ZZ"]] - }, - { - "name": "snapshot", - "type": "expectation_value_pauli", - "label": "post_measure", - "qubits": [0, 1], - "params": 
[[[1, 0] , "ZI"], [[1, 0], "IZ"]] - } - ] - } - ] -} \ No newline at end of file diff --git a/test/data/qobj_snapshot_probs.json b/test/data/qobj_snapshot_probs.json deleted file mode 100644 index 03faab580a..0000000000 --- a/test/data/qobj_snapshot_probs.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "qobj_id": "probabilities_snapshot_example", - "schema_version": "1.0.0", - "type": "QASM", - "experiments": [ - { - "config": { - "shots": 1000, - "memory_slots": 2, - "n_qubits": 2 - }, - "instructions": [ - {"name": "h", "qubits": [0]}, - {"name": "cx", "qubits": [0, 1]}, - {"name": "snapshot", "type": "probabilities", - "label": "pre_measure", "qubits": [0, 1]}, - {"name": "measure", "qubits": [0], "memory": [0]}, - {"name": "measure", "qubits": [1], "memory": [1]}, - {"name": "snapshot", "type": "probabilities", - "label": "post_measure", "qubits": [0, 1]} - ] - } - ] -} \ No newline at end of file diff --git a/test/data/qobj_snapshot_statevector.json b/test/data/qobj_snapshot_statevector.json deleted file mode 100644 index 1164a7d0ff..0000000000 --- a/test/data/qobj_snapshot_statevector.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "qobj_id": "state_snapshot_example", - "schema_version": "1.0.0", - "type": "QASM", - "experiments": [ - { - "config": { - "shots": 1, - "memory_slots": 0, - "n_qubits": 2 - }, - "instructions": [ - {"name": "snapshot", "type": "statevector", "label": "initial"}, - {"name": "h", "qubits": [0]}, - {"name": "snapshot", "type": "statevector", "label": "middle"}, - {"name": "cx", "qubits": [0, 1]}, - {"name": "snapshot", "type": "statevector", "label": "final"} - ] - } - ] -} \ No newline at end of file diff --git a/test/src/test_linalg.cpp b/test/src/test_linalg.cpp deleted file mode 100644 index 29dc037c75..0000000000 --- a/test/src/test_linalg.cpp +++ /dev/null @@ -1,401 +0,0 @@ -/** - * This code is part of Qiskit. - * - * (C) Copyright IBM 2018, 2019, 2020. - * - * This code is licensed under the Apache License, Version 2.0. You may - * obtain a copy of this license in the LICENSE.txt file in the root directory - * of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. - * - * Any modifications or derivative works of this code must retain this - * copyright notice, and modified files need to carry a notice indicating - * that they have been altered from the originals. 
- */ - -#define _USE_MATH_DEFINES -#include -#include -#include -#include - -#include -#include -#include - -#define CATCH_CONFIG_MAIN - -#include - -#include "utils.hpp" - -using namespace AER::Test::Utilities; - -namespace { -// check if polar coordinates are almost equal -// r -> (0,inf) -// angle -> (-PI, PI) -template -T eps() { - return std::numeric_limits::epsilon(); -} - -template -bool check_polar_coords(T r, T angle, T r_2, T angle_2, T max_diff = eps(), - T max_relative_diff = eps()); - -template -bool check_eigenvector(const std::vector> &expected_eigen, - const std::vector> &actual_eigen, - T max_diff = eps(), T max_relative_diff = eps()); - -// Sometimes eigenvectors differ by a factor and/or phase -// This function compares them taking this into account -template -bool check_all_eigenvectors(const matrix> &expected, - const matrix> &actual, - T max_diff = eps(), - T max_relative_diff = eps()); - -template -using scenarioData = std::tuple>, - matrix>, std::vector>; - -template -matrix> herm_mat_2d(); -template -matrix> herm_mat_2d_eigenvectors(); -template -std::vector herm_mat_2d_eigenvalues(); -template -scenarioData get_herm_2d_scen(); - -template -matrix> psd_mat_2d(); -template -matrix> psd_mat_2d_eigenvectors(); -template -std::vector psd_mat_2d_eigenvalues(); -template -scenarioData get_psd_2d_scen(); - -template -matrix> psd_mat_2d_with_zero(); -template -matrix> psd_mat_2d_wiht_zero_eigenvectors(); -template -std::vector psd_mat_2d_wiht_zero_eigenvalues(); -template -scenarioData get_psd_mat_2d_wiht_zero_scen(); -} // namespace - -TEST_CASE("Basic Matrix Ops", "[matrix]") { - auto mat = matrix>(2, 2); - mat(0, 0) = std::complex(1.0, 0.0); - mat(0, 1) = std::complex(1.0, -2.0); - mat(1, 0) = std::complex(1.0, 2.0); - mat(1, 1) = std::complex(0.5, 0.0); - - SECTION("matrix - col_index") { - std::vector> col0{{1.0, 0.0}, {1.0, 2.0}}; - std::vector> col1{{1.0, -2.0}, {0.5, 0.0}}; - - REQUIRE(compare(col0, mat.col_index(0))); - REQUIRE(compare(col1, mat.col_index(1))); - } - - SECTION("matrix - col_index") { - std::vector> row0{{1.0, 0.0}, {1.0, -2.0}}; - std::vector> row1{{1.0, 2.0}, {0.5, 0.0}}; - - REQUIRE(compare(row0, mat.row_index(0))); - REQUIRE(compare(row1, mat.row_index(1))); - } -} - -TEMPLATE_TEST_CASE("Linear Algebra utilities", "[eigen_hermitian]", float, - double) { - std::string scenario_name; - matrix> herm_mat; - matrix> expected_eigenvectors; - std::vector expected_eigenvalues; - std::tie(scenario_name, herm_mat, expected_eigenvectors, - expected_eigenvalues) = - GENERATE(get_herm_2d_scen(), get_psd_2d_scen(), - get_psd_mat_2d_wiht_zero_scen()); - - // We are checking results from a numerical method so we allow for some room - // in comparisons - TestType eps_threshold = 5 * eps(); - - SECTION(scenario_name + ": zheevx") { - SECTION("sanity check - eigenvals/vecs should recreate original") { - // sanity check - matrix> sanity_value(herm_mat.GetRows(), - herm_mat.GetColumns()); - for (size_t j = 0; j < expected_eigenvalues.size(); j++) { - sanity_value += - expected_eigenvalues[j] * - AER::Utils::projector(expected_eigenvectors.col_index(j)); - } - REQUIRE(compare(herm_mat, sanity_value, eps_threshold, eps_threshold)); - } - SECTION("actual check - heevx returns correctly") { - std::vector eigenvalues; - matrix> eigenvectors; - eigensystem_hermitian(herm_mat, eigenvalues, eigenvectors); - - // test equality - REQUIRE(check_all_eigenvectors(expected_eigenvectors, eigenvectors, - eps_threshold, eps_threshold)); - REQUIRE(compare(expected_eigenvalues, 
eigenvalues, eps_threshold, - eps_threshold)); - // test reconstruction - matrix> value(herm_mat.GetRows(), - herm_mat.GetColumns()); - for (size_t j = 0; j < eigenvalues.size(); j++) { - value += - AER::Utils::projector(eigenvectors.col_index(j)) * eigenvalues[j]; - } - REQUIRE(compare(herm_mat, value, eps_threshold, eps_threshold)); - } - } -} - -TEST_CASE("Framework Utilities", "[almost_equal]") { - SECTION("The maximum difference between two scalars over 1.0 is greater than " - "epsilon, so they are amlmost equal") { - double first = 1.0 + eps(); - double actual = 1.0; - // Because the max_diff param is bigger than epsilon, this should be almost - // equal - REQUIRE(AER::Linalg::almost_equal(first, actual)); //, 1e-15, 1e-15)); - } - - SECTION("The difference between two scalars really close to 0 should say are " - "almost equal") { - double first = 5e-323; // Really close to the min magnitude of double - double actual = 6e-323; - REQUIRE(AER::Linalg::almost_equal(first, actual)); //, 1e-323, 1e-323)); - } - - SECTION("The maximum difference between two complex of doubles over 1.0 is " - "greater than epsilon, so they are almost equal") { - std::complex first = {eps() + double(1.0), - eps() + double(1.0)}; - std::complex actual{1.0, 1.0}; - // Because the max_diff param is bigger than epsilon, this should be almost - // equal - REQUIRE(AER::Linalg::almost_equal(first, actual)); //, 1e-15, 1e-15)); - } - - SECTION("The difference between two complex of doubles really close to 0 " - "should say are almost equal") { - std::complex first = { - 5e-323, 5e-323}; // Really close to the min magnitude of double - std::complex actual = {6e-323, 6e-323}; - - REQUIRE(AER::Linalg::almost_equal(first, actual)); // 1e-323, 1e-323)); - } -} - -TEST_CASE("Test_utils", "[check_polar_coords]") { - auto r = 1.0; - auto angle = M_PI_2; - auto r_2 = 1.0 + eps(); - auto angle_2 = M_PI_2 + eps(); - - SECTION("Check 2 numbers that are equal") { - REQUIRE(check_polar_coords(r, angle, r_2, angle_2)); - } - - SECTION("Check 2 numbers that differ in absolute value") { - r_2 = r_2 + 1e3 * eps(); - REQUIRE(!check_polar_coords(r, angle, r_2, angle_2)); - } - - SECTION("Check 2 numbers that differ in absolute value") { - angle_2 = angle_2 + 1e3 * eps(); - REQUIRE(!check_polar_coords(r, angle, r_2, angle_2)); - } - - SECTION("Check corner case: close to +/-0 angles") { - angle = 0.0 - eps() / 2.; - angle_2 = -angle; - REQUIRE(check_polar_coords(r, angle, r_2, angle_2)); - } - - SECTION("Check corner case: angle PI and angle -PI") { - angle = M_PI - eps(); - angle_2 = -angle; - REQUIRE(check_polar_coords(r, angle, r_2, angle_2)); - } -} - -namespace { -template -matrix> herm_mat_2d() { - auto mat = matrix>(2, 2); - mat(0, 0) = std::complex(1.0, 0.0); - mat(0, 1) = std::complex(1.0, 2.0); - mat(1, 0) = std::complex(1.0, -2.0); - mat(1, 1) = std::complex(0.5, 0.0); - return mat; -} - -template -matrix> herm_mat_2d_eigenvectors() { - auto mat = matrix>(2, 2); - auto den = 3. * std::sqrt(5.); - mat(0, 0) = std::complex(-2 / den, -4 / den); - mat(1, 0) = std::complex(5 / den, 0.0); - mat(0, 1) = std::complex(-1. / 3., -2. / 3.); - mat(1, 1) = std::complex(-2. 
/ 3., 0); - return mat; -} - -template -std::vector herm_mat_2d_eigenvalues() { - return {-1.5, 3.0}; -} - -template -scenarioData get_herm_2d_scen() { - return {"Hermitian matrix 2x2", herm_mat_2d(), - herm_mat_2d_eigenvectors(), herm_mat_2d_eigenvalues()}; -} - -template -matrix> psd_mat_2d() { - auto psd_matrix = matrix>(2, 2); - - psd_matrix(0, 0) = std::complex(13., 0.); - psd_matrix(0, 1) = std::complex(0., 5.); - - psd_matrix(1, 0) = std::complex(0., -5.); - psd_matrix(1, 1) = std::complex(2., 0.); - - return psd_matrix; -} - -template -matrix> psd_mat_2d_eigenvectors() { - matrix> expected_eigenvectors(2, 2); - - expected_eigenvectors(0, 0) = std::complex(0, -0.3605966767761846214491); - expected_eigenvectors(0, 1) = std::complex(0, -0.9327218431547380506075); - - expected_eigenvectors(1, 0) = std::complex(0.9327218431547380506075, 0); - expected_eigenvectors(1, 1) = std::complex(-0.3605966767761846214491, 0); - - return expected_eigenvectors; -} - -template -std::vector psd_mat_2d_eigenvalues() { - return {0.06696562634074720854471252, 14.93303437365925212532147}; -} - -template -scenarioData get_psd_2d_scen() { - return {"PSD matrix 2x2", psd_mat_2d(), psd_mat_2d_eigenvectors(), - psd_mat_2d_eigenvalues()}; -} - -template -matrix> psd_mat_2d_with_zero() { - auto psd_matrix = matrix>(2, 2); - - psd_matrix(0, 0) = std::complex(1., 0.); - psd_matrix(0, 1) = std::complex(2., 0.); - - psd_matrix(1, 0) = std::complex(2., 0.); - psd_matrix(1, 1) = std::complex(4., 0.); - - return psd_matrix; -} - -template -matrix> psd_mat_2d_wiht_zero_eigenvectors() { - matrix> expected_eigenvectors(2, 2); - - expected_eigenvectors(0, 0) = std::complex(-2. / std::sqrt(5.), 0); - expected_eigenvectors(0, 1) = std::complex(1. / std::sqrt(5.), 0); - - expected_eigenvectors(1, 0) = std::complex(1. / std::sqrt(5.), 0); - expected_eigenvectors(1, 1) = std::complex(2. / std::sqrt(5.), 0); - - return expected_eigenvectors; -} - -template -std::vector psd_mat_2d_wiht_zero_eigenvalues() { - return {0.0, 5.0}; -} - -template -scenarioData get_psd_mat_2d_wiht_zero_scen() { - return {"PSD matrix 2x2 with a zero eigen value", psd_mat_2d_with_zero(), - psd_mat_2d_wiht_zero_eigenvectors(), - psd_mat_2d_wiht_zero_eigenvalues()}; -} - -template -bool check_polar_coords(T r, T angle, T r_2, T angle_2, T max_diff, - T max_relative_diff) { - if (!AER::Linalg::almost_equal(r, r_2, max_diff, max_relative_diff)) - return false; - if (!AER::Linalg::almost_equal(angle, angle_2, max_diff, max_relative_diff)) { - // May be corner case with PI and -PI - T angle_plus = angle > 0. ? angle : angle + 2 * M_PI; - T angle_2_plus = angle_2 > 0. ? 
angle_2 : angle_2 + 2 * M_PI; - if (!AER::Linalg::almost_equal(angle_plus, angle_2_plus, max_diff, - max_relative_diff)) - return false; - } - return true; -} - -template -bool check_eigenvector(const std::vector> &expected_eigen, - const std::vector> &actual_eigen, - T max_diff, T max_relative_diff) { - auto div = expected_eigen[0] / actual_eigen[0]; - T r = std::abs(div); - T angle = std::arg(div); - for (size_t j = 1; j < expected_eigen.size(); j++) { - auto div_2 = expected_eigen[j] / actual_eigen[j]; - T r_2 = std::abs(div_2); - T angle_2 = std::arg(div_2); - // Check that factor is consistent across components - if (!check_polar_coords(r, angle, r_2, angle_2, max_diff, - max_relative_diff)) { - return false; - } - } - return true; -} - -template -bool check_all_eigenvectors(const matrix> &expected, - const matrix> &actual, T max_diff, - T max_relative_diff) { - auto col_num = expected.GetColumns(); - if (expected.size() != actual.size() || - expected.GetColumns() != expected.GetColumns()) { - return false; - } - for (size_t i = 0; i < col_num; i++) { - auto expected_eigen = expected.col_index(i); - auto actual_eigen = actual.col_index(i); - - if (!check_eigenvector(expected_eigen, actual_eigen, max_diff, - max_relative_diff)) { - std::cout << "Expected: " << std::setprecision(16) << expected - << std::endl; - std::cout << "Actual: " << actual << std::endl; - return false; - } - } - return true; -} -} // namespace \ No newline at end of file diff --git a/test/src/test_snapshot.cpp b/test/src/test_snapshot.cpp deleted file mode 100644 index cd7d309b7d..0000000000 --- a/test/src/test_snapshot.cpp +++ /dev/null @@ -1,44 +0,0 @@ -#define CATCH_CONFIG_MAIN -#include -#include - -#include - -#include "utils.hpp" - -namespace AER { -namespace Test { - -TEST_CASE("Simulators Snapshot", "[snaphot]") { - std::map qobj_snapshots; - qobj_snapshots["state"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_statevector.json"); - qobj_snapshots["probs"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_probs.json"); - qobj_snapshots["pauli"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_expval_pauli.json"); - qobj_snapshots["matrix"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_expval_matrix.json"); - - AER::Simulator::QasmController sim{}; - - SECTION("State simulator snapshot") { - auto expected_result = R"({ - "final":[[[0.7071067811865476,0.0],[0.0,0.0],[0.0,0.0],[0.7071067811865475,0.0]]], - "initial":[[[1.0,0.0],[0.0,0.0],[0.0,0.0],[0.0,0.0]]], - "middle":[[[0.7071067811865476,0.0],[0.7071067811865475,0.0],[0.0,0.0],[0.0,0.0]]] - })"_json; - auto result = sim.execute(qobj_snapshots["state"]); - result = result["results"][0]["data"]["snapshots"]["state"]; - REQUIRE(result == expected_result); - } - SECTION("Probs simulator snapshot") { REQUIRE(false); } - SECTION("Pauli simulator snaphsot") { REQUIRE(false); } - SECTION("Unitary simulator snapshot") { REQUIRE(false); } -} - -//------------------------------------------------------------------------------ -} // end namespace Test -//------------------------------------------------------------------------------ -} // end namespace AER -//------------------------------------------------------------------------------ diff --git a/test/src/test_snapshot_bdd.cpp b/test/src/test_snapshot_bdd.cpp deleted file mode 100644 index b967831a0d..0000000000 --- a/test/src/test_snapshot_bdd.cpp +++ /dev/null @@ -1,49 +0,0 @@ -#define CATCH_CONFIG_MAIN -#include -#include - 
-#include - -#include "utils.hpp" - -namespace AER { -namespace Test { - -SCENARIO("We can get snapshots from different simulator types") { - GIVEN("A Qobj with snapshot information for every simulator type") { - - std::map qobj_snapshots; - qobj_snapshots["state"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_statevector.json"); - qobj_snapshots["probs"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_probs.json"); - qobj_snapshots["pauli"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_expval_pauli.json"); - qobj_snapshots["matrix"] = AER::Test::Utilities::load_qobj( - "../../test/data/qobj_snapshot_expval_matrix.json"); - - AER::Simulator::QasmController sim{}; - - WHEN("we get the expected results") { - auto expected_result = R"({ - "final":[[[0.7071067811865476,0.0],[0.0,0.0],[0.0,0.0],[0.7071067811865475,0.0]]], - "initial":[[[1.0,0.0],[0.0,0.0],[0.0,0.0],[0.0,0.0]]], - "middle":[[[0.7071067811865476,0.0],[0.7071067811865475,0.0],[0.0,0.0],[0.0,0.0]]] - })"_json; - THEN("the state simulator should pass") { - auto result = sim.execute(qobj_snapshots["state"]); - result = result["results"][0]["data"]["snapshots"]["state"]; - REQUIRE(result == expected_result); - } - THEN("the probs simulator should pass") { REQUIRE(false); } - THEN("the pauli simulator should pass") { REQUIRE(false); } - THEN("the unitary matrix simulator should pass") { REQUIRE(false); } - } - } -} - -//------------------------------------------------------------------------------ -} // end namespace Test -//------------------------------------------------------------------------------ -} // end namespace AER -//------------------------------------------------------------------------------ diff --git a/test/src/utils.hpp b/test/src/utils.hpp deleted file mode 100644 index f0d3865b98..0000000000 --- a/test/src/utils.hpp +++ /dev/null @@ -1,128 +0,0 @@ -/** - * This code is part of Qiskit. - * - * (C) Copyright IBM 2018, 2019, 2020. - * - * This code is licensed under the Apache License, Version 2.0. You may - * obtain a copy of this license in the LICENSE.txt file in the root directory - * of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. - * - * Any modifications or derivative works of this code must retain this - * copyright notice, and modified files need to carry a notice indicating - * that they have been altered from the originals. 
- */ - -#include "framework/json.hpp" -#include - -#include - -namespace Catch { -template -std::string convertMyTypeToString(const std::vector &value) { - std::stringstream oss; - oss << value; - return oss.str(); -} - -template -struct StringMaker> { - static std::string convert(const std::vector &value) { - return convertMyTypeToString(value); - } -}; -} // namespace Catch - -namespace AER { -namespace Test { -namespace Utilities { -inline json_t load_qobj(const std::string &filename) { - return JSON::load(filename); -} - -template -T calculate_floats(T start, T decrement, int count) { - for (int i = 0; i < count; ++i) - start -= decrement; - return start; -} - -template -bool _compare(const matrix &lhs, const matrix &rhs, - U max_diff = std::numeric_limits::epsilon(), - U max_relative_diff = std::numeric_limits::epsilon()) { - bool res = true; - std::ostringstream message; - if (lhs.size() != rhs.size()) { - res = false; - } - - for (size_t i = 0; i < lhs.GetRows(); ++i) { - for (size_t j = 0; j < lhs.GetColumns(); ++j) { - if (!(AER::Linalg::almost_equal(lhs(i, j), rhs(i, j), max_diff, - max_relative_diff))) { - message << "Matrices differ at element: (" << i << ", " << j << ")" - << std::setprecision(22) << ". [" << lhs(i, j) << "] != [" - << rhs(i, j) << "]\n"; - res = false; - } - } - } - if (!res) { - message << "Matrices differ: " << lhs << " != " << rhs << std::endl; - std::cout << message.str(); - } - return res; -} - -template -bool compare(const matrix &lhs, const matrix &rhs, - T max_diff = std::numeric_limits::epsilon(), - T max_relative_diff = std::numeric_limits::epsilon()) { - return _compare(lhs, rhs, max_diff, max_relative_diff); -} - -template -bool compare(const matrix> &lhs, - const matrix> &rhs, - T max_diff = std::numeric_limits::epsilon(), - T max_relative_diff = std::numeric_limits::epsilon()) { - return _compare(lhs, rhs, max_diff, max_relative_diff); -} - -template -bool _compare(const std::vector &lhs, const std::vector &rhs, - U max_diff = std::numeric_limits::epsilon(), - U max_relative_diff = std::numeric_limits::epsilon()) { - if (lhs.size() != rhs.size()) - return false; - for (size_t i = 0; i < lhs.size(); ++i) { - if (!(AER::Linalg::almost_equal(lhs[i], rhs[i], max_diff, - max_relative_diff))) { - std::cout << "Vectors differ at element: " << i << std::setprecision(22) - << ". 
[" << lhs[i] << "] != [" << rhs[i] << "]\n"; - std::cout << "Vectors differ: " << Catch::convertMyTypeToString(lhs) - << " != " << Catch::convertMyTypeToString(rhs) << std::endl; - return false; - } - } - return true; -} - -template -bool compare(const std::vector &lhs, const std::vector &rhs, - T max_diff = std::numeric_limits::epsilon(), - T max_relative_diff = std::numeric_limits::epsilon()) { - return _compare(lhs, rhs, max_diff, max_relative_diff); -} - -template -bool compare(const std::vector> &lhs, - const std::vector> &rhs, - T max_diff = std::numeric_limits::epsilon(), - T max_relative_diff = std::numeric_limits::epsilon()) { - return _compare(lhs, rhs, max_diff, max_relative_diff); -} -} // namespace Utilities -} // namespace Test -} // namespace AER \ No newline at end of file diff --git a/test/terra/backends/aer_simulator/test_circuit.py b/test/terra/backends/aer_simulator/test_circuit.py index cd4c0f7806..42367b67ab 100644 --- a/test/terra/backends/aer_simulator/test_circuit.py +++ b/test/terra/backends/aer_simulator/test_circuit.py @@ -193,29 +193,6 @@ def test_metadata_protected(self): deepcopy(job.result()) - def test_run_qobj(self): - """Test qobj run""" - - qubits = QuantumRegister(3) - clbits = ClassicalRegister(3) - - circuit = QuantumCircuit(qubits, clbits) - circuit.h(qubits[0]) - circuit.cx(qubits[0], qubits[1]) - circuit.cx(qubits[0], qubits[2]) - - for q, c in zip(qubits, clbits): - circuit.measure(q, c) - - backend = self.backend() - - shots = 1000 - with self.assertWarns(DeprecationWarning): - result = backend.run(assemble(circuit), shots=shots).result() - - self.assertSuccess(result) - self.compare_counts(result, [circuit], [{"0x0": 500, "0x7": 500}], delta=0.05 * shots) - def test_numpy_integer_shots(self): """Test implicit cast of shot option from np.int_ to int.""" diff --git a/test/terra/backends/simulator_test_case.py b/test/terra/backends/simulator_test_case.py index 2173c2c413..c075ff0140 100644 --- a/test/terra/backends/simulator_test_case.py +++ b/test/terra/backends/simulator_test_case.py @@ -20,8 +20,6 @@ from test.terra.common import QiskitAerTestCase from qiskit.circuit import QuantumCircuit from qiskit.compiler import assemble -from qiskit_aer.backends.backend_utils import cpp_execute_qobj -from qiskit_aer.backends.controller_wrappers import aer_controller_execute class SimulatorTestCase(QiskitAerTestCase): @@ -117,16 +115,12 @@ def check_cuStateVec(devices): if "GPU" in devices: dummy_circ = QuantumCircuit(1) dummy_circ.id(0) - qobj = assemble( - dummy_circ, - optimization_level=0, - shots=1, - method="statevector", - device="GPU", - cuStateVec_enable=True, - ) # run dummy circuit to check if Aer is built with cuStateVec - result = cpp_execute_qobj(aer_controller_execute(), qobj) - return result.get("success", False) + sim = AerSimulator() + result = sim.run( + dummy_circ, shots=1, method="statevector", device="GPU", cuStateVec_enable=True + ).result() + success = getattr(result, "success", False) + return success else: return False diff --git a/test/terra/backends/test_parameterized_qobj.py b/test/terra/backends/test_parameterized_circuit.py similarity index 82% rename from test/terra/backends/test_parameterized_qobj.py rename to test/terra/backends/test_parameterized_circuit.py index b37f2d981e..9a9290f202 100644 --- a/test/terra/backends/test_parameterized_qobj.py +++ b/test/terra/backends/test_parameterized_circuit.py @@ -34,92 +34,11 @@ from qiskit_aer import AerSimulator, AerError -class TestParameterizedQobj(common.QiskitAerTestCase): - 
"""Parameterized Qobj extension tests""" +class TestParameterizedCircuit(common.QiskitAerTestCase): + """Parameterized circuit extension tests""" BACKEND_OPTS = {"seed_simulator": 2113} - @staticmethod - def parameterized_qobj( - backend, - shots=1000, - measure=True, - snapshot=False, - save_state=False, - ): - """Return ParameterizedQobj for settings.""" - pershot = shots == 1 - pcirc1, param1 = save_expval_circuit_parameterized( - pershot=pershot, - measure=measure, - snapshot=snapshot, - ) - circuits2to4 = save_expval_circuits( - pauli=True, - skip_measure=(not measure), - pershot=pershot, - ) - pcirc2, param2 = save_expval_circuit_parameterized( - pershot=pershot, - measure=measure, - snapshot=snapshot, - ) - circuits = [pcirc1] + circuits2to4 + [pcirc2] - if save_state: - for circuit in circuits: - circuit.save_statevector(pershot=pershot) - params = [param1, [], [], [], param2] - qobj = assemble(circuits, backend=backend, shots=shots, parameterizations=params) - return qobj - - def test_parameterized_qobj_qasm_save_expval(self): - """Test parameterized qobj with Expectation Value snapshot and qasm simulator.""" - shots = 1000 - labels = save_expval_labels() * 3 - counts_targets = save_expval_counts(shots) * 3 - value_targets = save_expval_pre_meas_values() * 3 - - backend = AerSimulator() - qobj = self.parameterized_qobj(backend=backend, shots=1000, measure=True, snapshot=True) - self.assertIn("parameterizations", qobj.to_dict()["config"]) - with self.assertWarns(DeprecationWarning): - job = backend.run(qobj, **self.BACKEND_OPTS) - result = job.result() - success = getattr(result, "success", False) - num_circs = len(result.to_dict()["results"]) - self.assertTrue(success) - self.compare_counts(result, range(num_circs), counts_targets, delta=0.1 * shots) - # Check snapshots - for j, target in enumerate(value_targets): - data = result.data(j) - for label in labels: - self.assertAlmostEqual(data[label], target[label], delta=1e-7) - - def test_parameterized_qobj_statevector(self): - """Test parameterized qobj with Expectation Value snapshot and qasm simulator.""" - statevec_targets = save_expval_final_statevecs() * 3 - - backend = AerSimulator(method="statevector") - qobj = self.parameterized_qobj( - backend=backend, - measure=False, - snapshot=False, - save_state=True, - ) - self.assertIn("parameterizations", qobj.to_dict()["config"]) - with self.assertWarns(DeprecationWarning): - job = backend.run(qobj, **self.BACKEND_OPTS) - result = job.result() - success = getattr(result, "success", False) - num_circs = len(result.to_dict()["results"]) - self.assertTrue(success) - - for j in range(num_circs): - statevector = result.get_statevector(j) - np.testing.assert_array_almost_equal( - statevector, statevec_targets[j].data, decimal=7 - ) - def test_run_path(self): """Test parameterized circuit path via backed.run()""" shots = 1000 diff --git a/test/terra/backends/test_runtime_parameterization.py b/test/terra/backends/test_runtime_parameterization.py index 24f8f0e4c3..3a0ba3da3c 100644 --- a/test/terra/backends/test_runtime_parameterization.py +++ b/test/terra/backends/test_runtime_parameterization.py @@ -56,89 +56,6 @@ class TestRuntimeParameterization(SimulatorTestCase): "runtime_parameter_bind_enable": True, } - @staticmethod - def runtime_parameterization( - backend, - shots=1000, - measure=True, - snapshot=False, - save_state=False, - ): - """Return ParameterizedQobj for settings.""" - pershot = shots == 1 - pcirc1, param1 = save_expval_circuit_parameterized( - pershot=pershot, - 
measure=measure, - snapshot=snapshot, - ) - circuits2to4 = save_expval_circuits( - pauli=True, - skip_measure=(not measure), - pershot=pershot, - ) - pcirc2, param2 = save_expval_circuit_parameterized( - pershot=pershot, - measure=measure, - snapshot=snapshot, - ) - circuits = [pcirc1] + circuits2to4 + [pcirc2] - if save_state: - for circuit in circuits: - circuit.save_statevector(pershot=pershot) - params = [param1, [], [], [], param2] - qobj = assemble(circuits, backend=backend, shots=shots, parameterizations=params) - return qobj - - def test_runtime_parameterization_qasm_save_expval(self): - """Test parameterized qobj with Expectation Value snapshot and qasm simulator.""" - shots = 1000 - labels = save_expval_labels() * 3 - counts_targets = save_expval_counts(shots) * 3 - value_targets = save_expval_pre_meas_values() * 3 - - backend = AerSimulator() - qobj = self.runtime_parameterization( - backend=backend, shots=1000, measure=True, snapshot=True - ) - self.assertIn("parameterizations", qobj.to_dict()["config"]) - with self.assertWarns(DeprecationWarning): - job = backend.run(qobj, **self.BACKEND_OPTS) - result = job.result() - success = getattr(result, "success", False) - num_circs = len(result.to_dict()["results"]) - self.assertTrue(success) - self.compare_counts(result, range(num_circs), counts_targets, delta=0.1 * shots) - # Check snapshots - for j, target in enumerate(value_targets): - data = result.data(j) - for label in labels: - self.assertAlmostEqual(data[label], target[label], delta=1e-7) - - def test_runtime_parameterization_statevector(self): - """Test parameterized qobj with Expectation Value snapshot and qasm simulator.""" - statevec_targets = save_expval_final_statevecs() * 3 - - backend = AerSimulator(method="statevector") - qobj = self.runtime_parameterization( - backend=backend, - measure=False, - snapshot=False, - save_state=True, - ) - self.assertIn("parameterizations", qobj.to_dict()["config"]) - with self.assertWarns(DeprecationWarning): - job = backend.run(qobj, **self.BACKEND_OPTS) - result = job.result() - success = getattr(result, "success", False) - num_circs = len(result.to_dict()["results"]) - self.assertTrue(success) - - for j in range(num_circs): - statevector = result.get_statevector(j) - np.testing.assert_array_almost_equal( - statevector, statevec_targets[j].data, decimal=7 - ) - @supported_methods(SUPPORTED_METHODS) def test_run_path(self, method, device): """Test parameterized circuit path via backed.run()""" diff --git a/test/terra/extensions/test_wrappers.py b/test/terra/extensions/test_wrappers.py index cc7e62b50a..04a75d7370 100644 --- a/test/terra/extensions/test_wrappers.py +++ b/test/terra/extensions/test_wrappers.py @@ -39,31 +39,6 @@ def test_pickleable(self): bites = pickle.dumps(cfunc) cahpy = pickle.loads(bites) - def _create_qobj(self, backend, noise_model=None): - num_qubits = 2 - circuit = QuantumCircuit(num_qubits) - circuit.x(list(range(num_qubits))) - circuit = transpile(circuit, backend) - opts = {"max_parallel_threads": 1, "library_dir": LIBRARY_DIR, "noise_model": noise_model} - qobj = backend._assemble(circuit, **opts) - return qobj - - def _map_and_test(self, cfunc, qobj): - n = 2 - with Pool(processes=1) as p: - rs = p.map(cfunc, [copy.deepcopy(qobj) for _ in range(n)]) - - self.assertEqual(len(rs), n) - for r in rs: - self.assertTrue(r["success"]) - - def test_mappable_qasm(self): - """Test that the qasm controller can be mapped.""" - cfunc = aer_controller_execute() - sim = AerSimulator() - fqobj = self._create_qobj(sim) - 
self._map_and_test(cfunc, fqobj) - if __name__ == "__main__": unittest.main() diff --git a/tools/generate_qobj.py b/tools/generate_qobj.py deleted file mode 100755 index 45006046c5..0000000000 --- a/tools/generate_qobj.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2021, IBM. -# -# This source code is licensed under the Apache License, Version 2.0 found in -# the LICENSE.txt file in the root directory of this source tree. - -import json -import os - -from qiskit import ClassicalRegister -from qiskit.compiler import assemble, transpile -from qiskit import QuantumCircuit -from qiskit import QuantumRegister - - -def grovers_circuit(final_measure=True, allow_sampling=True): - """Testing a circuit originated in the Grover algorithm""" - - circuits = [] - - # 6-qubit grovers - qr = QuantumRegister(6) - if final_measure: - cr = ClassicalRegister(2) - regs = (qr, cr) - else: - regs = (qr,) - circuit = QuantumCircuit(*regs) - - circuit.h(qr[0]) - circuit.h(qr[1]) - circuit.x(qr[2]) - circuit.x(qr[3]) - circuit.x(qr[0]) - circuit.cx(qr[0], qr[2]) - circuit.x(qr[0]) - circuit.cx(qr[1], qr[3]) - circuit.ccx(qr[2], qr[3], qr[4]) - circuit.cx(qr[1], qr[3]) - circuit.x(qr[0]) - circuit.cx(qr[0], qr[2]) - circuit.x(qr[0]) - circuit.x(qr[1]) - circuit.x(qr[4]) - circuit.h(qr[4]) - circuit.ccx(qr[0], qr[1], qr[4]) - circuit.h(qr[4]) - circuit.x(qr[0]) - circuit.x(qr[1]) - circuit.x(qr[4]) - circuit.h(qr[0]) - circuit.h(qr[1]) - circuit.h(qr[4]) - if final_measure: - circuit.barrier(qr) - circuit.measure(qr[0], cr[0]) - circuit.measure(qr[1], cr[1]) - if not allow_sampling: - circuit.barrier(qr) - circuit.id(qr) - circuits.append(circuit) - - return circuits - - -if __name__ == "__main__": - # Run qasm simulator - shots = 4000 - circuits = grovers_circuit(final_measure=True, allow_sampling=True) - if os.getenv("USE_MPI", False): - qobj = assemble(transpile(circuits), shots=shots, blocking_enable=True, blocking_qubits=2) - else: - qobj = assemble(transpile(circuits), shots=shots) - with open("qobj.json", "wt") as fp: - json.dump(qobj.to_dict(), fp) From 586abe86be85637c66ed112d95bf2492949ff10d Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Fri, 5 Jul 2024 10:51:26 +0900 Subject: [PATCH 02/13] format --- src/framework/operations.hpp | 3 +-- src/framework/pybind_json.hpp | 3 ++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/framework/operations.hpp b/src/framework/operations.hpp index ba6136c6ef..1490986c7c 100644 --- a/src/framework/operations.hpp +++ b/src/framework/operations.hpp @@ -686,8 +686,7 @@ inline std::ostream &operator<<(std::ostream &s, const Op &op) { // Raise an exception if name string is empty inline void check_empty_name(const Op &op) { if (op.name.empty()) - throw std::invalid_argument( - R"(Invalid instruction ("name" is empty).)"); + throw std::invalid_argument(R"(Invalid instruction ("name" is empty).)"); } // Raise an exception if qubits list is empty diff --git a/src/framework/pybind_json.hpp b/src/framework/pybind_json.hpp index d1e2fba66e..0f23e7d0f2 100644 --- a/src/framework/pybind_json.hpp +++ b/src/framework/pybind_json.hpp @@ -222,7 +222,8 @@ void std::to_json(json_t &js, const py::handle &obj) { static py::object PyNoiseModel = py::module::import("qiskit_aer.noise.noise_model").attr("NoiseModel"); static py::object PyCircuitHeader = - py::module::import("qiskit_aer.backends.backend_utils").attr("CircuitHeader"); + py::module::import("qiskit_aer.backends.backend_utils") + .attr("CircuitHeader"); if (py::isinstance(obj)) { js = 
obj.cast(); } else if (py::isinstance(obj)) { From 07ef8175dce013fa7ce046db7e372334234ff1ea Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Fri, 5 Jul 2024 11:06:35 +0900 Subject: [PATCH 03/13] remove unused imports --- qiskit_aer/backends/aerbackend.py | 4 +- qiskit_aer/backends/statevector_simulator.py | 1 - qiskit_aer/backends/unitary_simulator.py | 1 - qiskit_aer/jobs/aerjob.py | 1 - qiskit_aer/jobs/aerjobset.py | 446 ------------------ qiskit_aer/jobs/utils.py | 3 - .../aer_simulator/test_job_splitting.py | 99 ---- 7 files changed, 1 insertion(+), 554 deletions(-) delete mode 100644 qiskit_aer/jobs/aerjobset.py delete mode 100644 test/terra/backends/aer_simulator/test_job_splitting.py diff --git a/qiskit_aer/backends/aerbackend.py b/qiskit_aer/backends/aerbackend.py index 985f4dcce4..00e1a73118 100644 --- a/qiskit_aer/backends/aerbackend.py +++ b/qiskit_aer/backends/aerbackend.py @@ -18,11 +18,9 @@ import logging import time import uuid -import warnings from abc import ABC, abstractmethod from qiskit.circuit import QuantumCircuit, ParameterExpression, Delay -from qiskit.compiler import assemble from qiskit.providers import BackendV2 as Backend from qiskit.providers import convert_to_target from qiskit.providers.models import BackendStatus @@ -144,7 +142,7 @@ def _convert_binds(self, circuits, parameter_binds, idx_maps=None): return parameterizations # pylint: disable=arguments-renamed - def run(self, circuits, validate=False, parameter_binds=None, **run_options): + def run(self, circuits, parameter_binds=None, **run_options): """Run circuits on the backend. Args: diff --git a/qiskit_aer/backends/statevector_simulator.py b/qiskit_aer/backends/statevector_simulator.py index 7bf293cc98..86baf9e068 100644 --- a/qiskit_aer/backends/statevector_simulator.py +++ b/qiskit_aer/backends/statevector_simulator.py @@ -17,7 +17,6 @@ import logging from warnings import warn -import psutil from qiskit.providers.options import Options from qiskit.providers.models import QasmBackendConfiguration diff --git a/qiskit_aer/backends/unitary_simulator.py b/qiskit_aer/backends/unitary_simulator.py index 555af9a581..231a444446 100644 --- a/qiskit_aer/backends/unitary_simulator.py +++ b/qiskit_aer/backends/unitary_simulator.py @@ -18,7 +18,6 @@ import logging from warnings import warn -import psutil from qiskit.providers.options import Options from qiskit.providers.models import QasmBackendConfiguration diff --git a/qiskit_aer/jobs/aerjob.py b/qiskit_aer/jobs/aerjob.py index 4c662ba8f8..775dff29cc 100644 --- a/qiskit_aer/jobs/aerjob.py +++ b/qiskit_aer/jobs/aerjob.py @@ -15,7 +15,6 @@ """This module implements the job class used for AerBackend objects.""" import logging -import warnings from qiskit.providers import JobV1 as Job from qiskit.providers import JobStatus, JobError diff --git a/qiskit_aer/jobs/aerjobset.py b/qiskit_aer/jobs/aerjobset.py deleted file mode 100644 index 16fae01eea..0000000000 --- a/qiskit_aer/jobs/aerjobset.py +++ /dev/null @@ -1,446 +0,0 @@ -# -*- coding: utf-8 -*- - -# This code is part of Qiskit. -# -# (C) Copyright IBM 2019, 2020. -# -# This code is licensed under the Apache License, Version 2.0. You may -# obtain a copy of this license in the LICENSE.txt file in the root directory -# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. -# -# Any modifications or derivative works of this code must retain this -# copyright notice, and modified files need to carry a notice indicating -# that they have been altered from the originals. 
- -# pylint: disable=arguments-differ - -"""A set of cluster jobs for Aer.""" - -from typing import List, Optional, Union, Tuple, Iterable -import time -import logging -import datetime -import uuid -from collections import Counter - -from qiskit.circuit import QuantumCircuit -from qiskit.pulse import Schedule -from qiskit.qobj import QasmQobj -from qiskit.providers import JobV1 as Job -from qiskit.providers import JobStatus, JobError -from qiskit.result import Result - -from .utils import DEFAULT_EXECUTOR, requires_submit -from .aerjob import AerJob - -logger = logging.getLogger(__name__) - - -class AerJobSet(Job): - """A set of :class:`~AerJob` classes for Aer simulators. - - An instance of this class is returned when you submit experiments with - executor option. It provides methods that allow you to interact - with the jobs as a single entity. For example, you can retrieve the results - for all of the jobs using :meth:`result()` and cancel all jobs using - :meth:`cancel()`. - """ - - def __init__(self, backend, job_id, fn, experiments: List[QasmQobj], executor=None): - """AerJobSet constructor. - - Args: - backend(Aerbackend): Aerbackend. - job_id(int): Job Id. - fn(function): a callable function to execute qobj on backend. - This should usually be a bound :meth:`AerBackend._run()` method, - with the signature `(qobj: QasmQobj, job_id: str) -> Result`. - experiments(List[QasmQobj]): List[QasmQobjs] to execute. - executor(ThreadPoolExecutor or dask.distributed.client): - The executor to be used to submit the job. - """ - super().__init__(backend, job_id) - self._experiments = experiments - - # Used for caching - self._future = None - self._futures = [] - self._results = None - self._fn = fn - self._executor = executor or DEFAULT_EXECUTOR - self._start_time = None - self._end_time = None - self._combined_result = [] - - def submit(self): - """Execute this set of jobs on an executor. - - Raises: - RuntimeError: If the jobs were already submitted. - """ - if self._futures: - raise RuntimeError("The jobs for this managed job set have already been submitted.") - - self._future = True - worker_id = 0 - self._start_time = datetime.datetime.now() - for experiments in self._experiments: - _worker_id_list = [] - for exp in experiments: - job_id = str(uuid.uuid4()) - logger.debug("Job %s submitted", worker_id) - aer_job = AerJob(self._backend, job_id, self._fn, exp, self._executor) - aer_job.submit() - aer_job._future.add_done_callback(self._set_end_time) - self._futures.append(aer_job) - _worker_id_list.append(worker_id) - worker_id = worker_id + 1 - self._combined_result.append(_worker_id_list) - - @requires_submit - def status(self, worker: Union[None, int, Iterable[int]]) -> Union[JobStatus, List[JobStatus]]: - """Return the status of each job in this set. - - Args - worker: Worker id. When None, all workers' statuses are returned. - - Returns: - A list of job statuses. - """ - if isinstance(worker, int): - aer_job = self._futures[worker] - return aer_job.status() - elif isinstance(worker, Iterable): - job_list = [] - for worker_id in worker: - aer_job = self._futures[worker_id] - job_list.append(aer_job.status()) - return job_list - else: - return [aer.status() for aer in self._futures] - - @requires_submit - def result( - self, - timeout: Optional[float] = None, - ) -> Result: - """Return the results of the jobs as a single Result object. - - This call will block until all job results become available or - the timeout is reached. - - Args: - timeout: Number of seconds to wait for job results. 
- - Returns: - qiskit.Result: Result object - - Raises: - JobError: if unable to retrieve all job results before the - specified timeout. - - """ - res = self.worker_results(worker=None, timeout=timeout) - return res - - @requires_submit - def worker_results( - self, - worker: Union[None, int, Iterable[int]], - timeout: Optional[float] = None, - ) -> Union[Result, List[Result]]: - """Return the result of the jobs specified with worker_id. - - When the worker is None, this call return all worker's result. - - Args: - worker: Worker id to wait for job result. - timeout: Number of seconds to wait for job results. - - Returns: - qiskit.Result: Result object - instance that can be used to retrieve results - for individual experiments. - - Raises: - JobError: if unable to retrieve all job results before the - specified timeout. - """ - - # We'd like to use futures.as_completed or futures.wait - # however this excludes the use of dask as executor - # because dask's futures are not ~exactly~ the same. - res = [] - - if isinstance(worker, int): - res = self._get_worker_result(worker, timeout) - elif isinstance(worker, Iterable): - _res = [] - for worker_id in worker: - _res.append(self._get_worker_result(worker_id, timeout)) - res = self._combine_results(_res) - else: - for _worker_id_list in self._combined_result: - _res = [] - for worker_id in _worker_id_list: - _res.append(self._get_worker_result(worker_id, timeout)) - res.append(self._combine_results(_res)) - - res = self._accumulate_experiment_results(res) - return self._combine_job_results(res) - - def _get_worker_result(self, worker: int, timeout: Optional[float] = None): - """Return the result of the jobs specified with worker_id. - - this call return all worker's result specified worker and - block until job result become available or the timeout is reached. - Analogous to dask.client.gather() - - Args: - worker: Worker id to wait for job result. - timeout: Number of seconds to wait for job results. - - Returns: - qiskit.Result: Result object - instance that can be used to retrieve a result. - - Raises: - JobError: if unable to retrieve all job results before the - specified timeout. 
- """ - start_time = time.time() - original_timeout = timeout - aer_job = self._futures[worker] - - try: - result = aer_job.result(timeout=timeout) - if result is None or not result.success: - if result: - logger.warning("AerJobSet %s Error: %s", aer_job.name(), result.header) - else: - logger.warning("AerJobSet %s did not return a result", aer_job.name()) - except JobError as ex: - raise JobError( - "Timeout while waiting for the results of experiment {}".format(aer_job.name()) - ) from ex - - if timeout: - timeout = original_timeout - (time.time() - start_time) - if timeout <= 0: - raise JobError("Timeout while waiting for JobSet results") - return result - - def _combine_job_results(self, result_list: List[Result]): - if len(result_list) == 1: - return result_list[0] - - master_result = result_list[0] - _merge_result_list = [] - - for _result in result_list[1:]: - for _master_result, _sub_result in zip(master_result.results, _result.results): - _merge_result_list.append(self._merge_exp(_master_result, _sub_result)) - master_result.results = _merge_result_list - return master_result - - def _accumulate_experiment_results(self, results: List[Result]): - """Merge all experiments into a single in a`Result` - - this function merges the counts and the number of shots - from each experiment in a `Result` for a noise simulation - if `id` in metadata field is the same. - - Args: - results: Result list whose experiments will be combined. - - Returns: - list: Result list - - Raises: - JobError: If results do not have count or memory data - """ - results_list = [] - for each_result in results: - _merge_results = [] - master_id = None - master_result = None - - for _result in each_result.results: - if not hasattr(_result.data, "counts") and not hasattr(_result.data, "memory"): - raise JobError("Results do not include counts or memory data") - meta_data = getattr(_result.header, "metadata", None) - if meta_data and "id" in meta_data: - _id = meta_data["id"] - if master_id == _id: - master_result = self._merge_exp(master_result, _result) - else: - master_id = _id - master_result = _result - _merge_results.append(master_result) - else: - _merge_results.append(_result) - each_result.results = _merge_results - results_list.append(each_result) - return results_list - - def _merge_exp(self, master: Result, sub: Result): - master.shots = master.shots + sub.shots - if hasattr(master.data, "counts"): - master.data.counts = Counter(master.data.counts) + Counter(sub.data.counts) - - if hasattr(master.data, "memory"): - master.data.memory = master.data.memory + sub.data.memory - - return master - - def _combine_results(self, results: List[Union[Result, None]] = None) -> Result: - """Combine results from all jobs into a single `Result`. - - Note: - Since the order of the results must match the order of the initial - experiments, job results can only be combined if all jobs succeeded. - - Args: - results: Result will be combined. - Returns: - A :class:`~qiskit.result.Result` object that contains results from - all jobs. - Raises: - JobError: If results cannot be combined because some jobs failed. 
- """ - if not results: - raise JobError("Results cannot be combined - no results.") - - # find first non-null result and copy it's config - _result = next((r for r in results if r is not None), None) - - if _result: - combined_result = { - "backend_name": _result.backend_name, - "backend_version": _result.backend_version, - "qobj_id": _result.qobj_id, - "job_id": _result.job_id, - "success": _result.success, - } - combined_result["results"] = [] - if hasattr(_result, "status"): - combined_result["status"] = _result.status - if hasattr(_result, "header"): - combined_result["header"] = _result.header.to_dict() - combined_result.update(_result._metadata) - else: - raise JobError("Results cannot be combined - no results.") - - for each_result in results: - if each_result is not None: - combined_result["results"].extend(x.to_dict() for x in each_result.results) - - if self._end_time is None: - self._end_time = datetime.datetime.now() - - if self._start_time: - _time_taken = self._end_time - self._start_time - combined_result["time_taken"] = _time_taken.total_seconds() - else: - combined_result["time_taken"] = 0 - - combined_result["date"] = datetime.datetime.isoformat(self._end_time) - return Result.from_dict(combined_result) - - @requires_submit - def cancel(self) -> None: - """Cancel all jobs in this job set.""" - for aer_job in self._futures: - aer_job.cancel() - - @requires_submit - def job(self, experiment: Union[str, QuantumCircuit, Schedule]) -> Tuple[AerJob, int]: - """Retrieve the job used to submit the specified experiment and its index. - - Args: - experiment: Retrieve the job used to submit this experiment. Several - types are accepted for convenience: - - * str: The name of the experiment. - * QuantumCircuit: The name of the circuit instance will be used. - * Schedule: The name of the schedule instance will be used. - - Returns: - A tuple of the job used to submit the experiment and the experiment index. - - Raises: - JobError: If the job for the experiment could not be found. - """ - worker_index = self.worker(experiment) - return self.worker_job(worker_index) - - @requires_submit - def worker(self, experiment: Union[str, QuantumCircuit, Schedule]) -> Union[int, List[int]]: - """Retrieve the index of job. - - Args: - experiment: Retrieve the job used to submit this experiment. Several - types are accepted for convenience: - - * str: The name of the experiment. - * QuantumCircuit: The name of the circuit instance will be used. - * Schedule: The name of the schedule instance will be used. - - Returns: - list or integer value of the job id - - Raises: - JobError: If the job for the experiment could not be found. - """ - - if isinstance(experiment, (QuantumCircuit, Schedule)): - experiment = experiment.name - job_list = [] - for job in self._futures: - for i, exp in enumerate(job.qobj().experiments): - if hasattr(exp.header, "name") and exp.header.name == experiment: - job_list.append(i) - - if len(job_list) == 1: - return job_list[0] - elif len(job_list) > 1: - return job_list - - raise JobError("Unable to find the job for experiment {}.".format(experiment)) - - @requires_submit - def worker_job(self, worker: Union[None, int, Iterable[int]]) -> Union[AerJob, List[AerJob]]: - """Retrieve the job specified with job's id - - Args: - worker: retrive job used to submit with this job id. - - Returns: - A list of :class:`~qiskit_aer.AerJob` - instances that represents the submitted jobs. - - Raises: - JobError: If the job for the experiment could not be found. 
- """ - aer_jobs = [] - if isinstance(worker, int): - return self._futures[worker] - elif isinstance(worker, Iterable): - for worker_id in worker: - aer_jobs.append(self._futures[worker_id]) - return aer_jobs - else: - return self._futures - - def _set_end_time(self, future): - """Set job's end time to calculate "time_taken" value - - Args: - future(concurrent.futures or dask.distributed.futures): callback future object - """ - # pylint: disable=unused-argument - self._end_time = datetime.datetime.now() - - def executor(self): - """Return the executor for this job""" - return self._executor diff --git a/qiskit_aer/jobs/utils.py b/qiskit_aer/jobs/utils.py index 83a3970391..0070e7c45d 100644 --- a/qiskit_aer/jobs/utils.py +++ b/qiskit_aer/jobs/utils.py @@ -11,9 +11,6 @@ # that they have been altered from the originals. """Utility functions for Aer job management.""" -import uuid -import copy -from math import ceil from functools import singledispatch, update_wrapper, wraps from concurrent.futures import ThreadPoolExecutor diff --git a/test/terra/backends/aer_simulator/test_job_splitting.py b/test/terra/backends/aer_simulator/test_job_splitting.py deleted file mode 100644 index 148a070904..0000000000 --- a/test/terra/backends/aer_simulator/test_job_splitting.py +++ /dev/null @@ -1,99 +0,0 @@ -# This code is part of Qiskit. -# -# (C) Copyright IBM 2018, 2019. -# -# This code is licensed under the Apache License, Version 2.0. You may -# obtain a copy of this license in the LICENSE.txt file in the root directory -# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. -# -# Any modifications or derivative works of this code must retain this -# copyright notice, and modified files need to carry a notice indicating -# that they have been altered from the originals. 
- -# pylint: disable=arguments-differ - -import unittest -import logging -from ddt import ddt, data - -from qiskit import transpile, assemble -from qiskit.providers import JobError -from qiskit.circuit.random import random_circuit -from qiskit_aer.jobs import split_qobj -from test.terra.reference.ref_save_expval import ( - save_expval_circuits, - save_expval_circuit_parameterized, -) -from test.terra.backends.simulator_test_case import SimulatorTestCase - - -@ddt -class TestJobSplitting(SimulatorTestCase): - """Test job splitting option""" - - @staticmethod - def parameterized_circuits(): - """Return ParameterizedQobj for settings.""" - pcirc1, param1 = save_expval_circuit_parameterized( - pershot=False, - measure=True, - snapshot=False, - ) - circuits2to4 = save_expval_circuits( - pauli=True, - skip_measure=False, - pershot=False, - ) - pcirc2, param2 = save_expval_circuit_parameterized( - pershot=False, - measure=True, - snapshot=False, - ) - circuits = [pcirc1] + circuits2to4 + [pcirc2] - params = [param1, [], [], [], param2] - return circuits, params - - def split_compare(self, circs, backend, parameterizations=None): - """Qobj split test""" - qobj = assemble(circs, parameterizations=parameterizations, qobj_id="testing") - if parameterizations: - qobjs = [ - assemble(c, parameterizations=[p], qobj_id="testing") - for (c, p) in zip(circs, parameterizations) - ] - else: - qobjs = [assemble(c, qobj_id="testing") for c in circs] - - test_qobjs = split_qobj(qobj, max_size=1, qobj_id="testing") - self.assertEqual(len(test_qobjs[0]), len(qobjs)) - for ref, test in zip(qobjs, test_qobjs[0]): - self.assertEqual(ref, test) - - def add_custom_instruction(self): - backend = self.backend(max_job_size=1, max_shot_size=1) - circ = random_circuit(num_qubits=2, depth=4) - circ.save_statevector() - circ = transpile(circ, backend) - qobj = assemble(circ) - split_qobj(qobj, max_size=1, max_shot_size=1, qobj_id="testing") - - def test_split(self): - """Circuits split test""" - backend = self.backend(max_job_size=1) - circs = [random_circuit(num_qubits=2, depth=4, measure=True, seed=i) for i in range(2)] - circs = transpile(circs, backend) - self.split_compare(circs, backend) - - def test_parameterized_split(self): - """Parameterized circuits split test""" - backend = self.backend(max_job_size=1) - circs, params = self.parameterized_circuits() - self.split_compare(circs, backend, parameterizations=params) - - def test_custom_instruction_error(self): - with self.assertRaises(JobError): - self.add_custom_instruction() - - -if __name__ == "__main__": - unittest.main() From 61d63199c9695e7bcd45da9574dc8a758da629c1 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Fri, 5 Jul 2024 11:09:22 +0900 Subject: [PATCH 04/13] remove unused imports --- qiskit_aer/backends/aerbackend.py | 1 - 1 file changed, 1 deletion(-) diff --git a/qiskit_aer/backends/aerbackend.py b/qiskit_aer/backends/aerbackend.py index 00e1a73118..500cc519a4 100644 --- a/qiskit_aer/backends/aerbackend.py +++ b/qiskit_aer/backends/aerbackend.py @@ -148,7 +148,6 @@ def run(self, circuits, parameter_binds=None, **run_options): Args: circuits (QuantumCircuit or list): The QuantumCircuit (or list of QuantumCircuit objects) to run - validate (bool): validate before running (default: False). parameter_binds (list): A list of parameter binding dictionaries. See additional information (default: None). run_options (kwargs): additional run time backend options. 
From fc1c8510c3d761f9c8a36667cecf068a69bfc6a8 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Fri, 5 Jul 2024 14:15:33 +0900 Subject: [PATCH 05/13] restore neccesary files --- CMakeLists.txt | 4 - test/__init__.py | 15 +++ test/asv.linux.conf.json | 191 ++++++++++++++++++++++++++++++++++ test/asv.linux.cuda.conf.json | 191 ++++++++++++++++++++++++++++++++++ 4 files changed, 397 insertions(+), 4 deletions(-) create mode 100644 test/__init__.py create mode 100644 test/asv.linux.conf.json create mode 100644 test/asv.linux.cuda.conf.json diff --git a/CMakeLists.txt b/CMakeLists.txt index b2b754b5d6..2bbda7dccf 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -544,7 +544,3 @@ set(AER_COMPILER_DEFINITIONS ${AER_COMPILER_DEFINITIONS} ${CONAN_DEFINES}) add_subdirectory(qiskit_aer/backends/wrappers) -# Tests -if(BUILD_TESTS) - add_subdirectory(test) -endif() diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 0000000000..a0e6fee90c --- /dev/null +++ b/test/__init__.py @@ -0,0 +1,15 @@ +# This code is part of Qiskit. +# +# (C) Copyright IBM 2018, 2019. +# +# This code is licensed under the Apache License, Version 2.0. You may +# obtain a copy of this license in the LICENSE.txt file in the root directory +# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. +# +# Any modifications or derivative works of this code must retain this +# copyright notice, and modified files need to carry a notice indicating +# that they have been altered from the originals. + +""" +Aer tests +""" diff --git a/test/asv.linux.conf.json b/test/asv.linux.conf.json new file mode 100644 index 0000000000..c5c72ec389 --- /dev/null +++ b/test/asv.linux.conf.json @@ -0,0 +1,191 @@ +// To use this configuration for running the benchmarks, we have to run asv like this: +// $ asv --connfig asv.linux.conf.json +{ + // The version of the config file format. Do not change, unless + // you know what you are doing. + "version": 1, + + // The name of the project being benchmarked + "project": "qiskit-aer", + + // The project's homepage + "project_url": "http://qiskit.org/aer", + + // The URL or local path of the source code repository for the + // project being benchmarked + "repo": "../", + + // The Python project's subdirectory in your repo. If missing or + // the empty string, the project is assumed to be located at the root + // of the repository. + // "repo_subdir": "", + + // Customizable commands for building, installing, and + // uninstalling the project. See asv.conf.json documentation. 
+ // + // "install_command": ["python -mpip install {wheel_file}"], + // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], + // "build_command": [ + // "python setup.py build", + // "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}" + // ], + + "install_command": [ + "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit', True)\"", + "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_aer.egg-info', True)\"", + "pip install git+https://github.com/Qiskit/qiskit", + "pip install git+https://github.com/Qiskit/qiskit-aqua", + "python -mpip install {wheel_file}" + ], + "uninstall_command": [ + "return-code=any python -mpip uninstall -y qiskit", + "return-code=any python -mpip uninstall -y qiskit-aqua", + "return-code=any python -mpip uninstall -y {project}" + ], + "build_command": [ + "python -mpip install -U scikit-build", + "pip install git+https://github.com/Qiskit/qiskit", + "pip install git+https://github.com/Qiskit/qiskit-aqua", + "pip install pyscf", + "pip install matplotlib", + "python setup.py bdist_wheel --dist-dir={build_cache_dir} -- -DCMAKE_CXX_COMPILER=g++ -- -j" + ], + + // List of branches to benchmark. If not provided, defaults to "main" + // (for git) or "default" (for mercurial). + // "branches": ["main"], // for git + // "branches": ["default"], // for mercurial + + // The DVCS being used. If not set, it will be automatically + // determined from "repo" by looking at the protocol in the URL + // (if remote), or by looking for special directories, such as + // ".git" (if local). + // "dvcs": "git", + + // The tool to use to create environments. May be "conda", + // "virtualenv" or other value depending on the plugins in use. + // If missing or the empty string, the tool will be automatically + // determined by looking for tools on the PATH environment + // variable. + "environment_type": "conda", + + // timeout in seconds for installing any dependencies in environment + // defaults to 10 min + //"install_timeout": 600, + + // the base URL to show a commit for the project. + // "show_commit_url": "http://github.com/owner/project/commit/", + + // The Pythons you'd like to test against. If not provided, defaults + // to the current version of Python used to run `asv`. + // "pythons": ["3.7", "3.8", "3.9"], + + // The list of conda channel names to be searched for benchmark + // dependency packages in the specified order + // "conda_channels": ["conda-forge", "defaults"] + + // The matrix of dependencies to test. Each key is the name of a + // package (in PyPI) and the values are version numbers. An empty + // list or empty string indicates to just test against the default + // (latest) version. null indicates that the package is to not be + // installed. If the package to be tested is only available from + // PyPi, and the 'environment_type' is conda, then you can preface + // the package name by 'pip+', and the package will be installed via + // pip (with all the conda available packages installed first, + // followed by the pip installed packages). + // + // "matrix": { + // "numpy": ["1.6", "1.7"], + // "six": ["", null], // test with and without six installed + // "pip+emcee": [""], // emcee is only available for install with pip. + // }, + + //"matrix": { + // "pip+qiskit": [""], + //}, + + // Combinations of libraries/python versions can be excluded/included + // from the set to test. Each entry is a dictionary containing additional + // key-value pairs to include/exclude. 
+ // + // An exclude entry excludes entries where all values match. The + // values are regexps that should match the whole string. + // + // An include entry adds an environment. Only the packages listed + // are installed. The 'python' key is required. The exclude rules + // do not apply to includes. + // + // In addition to package names, the following keys are available: + // + // - python + // Python version, as in the *pythons* variable above. + // - environment_type + // Environment type, as above. + // - sys_platform + // Platform, as in sys.platform. Possible values for the common + // cases: 'linux2', 'win32', 'cygwin', 'darwin'. + // + // "exclude": [ + // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows + // {"environment_type": "conda", "six": null}, // don't run without six on conda + // ], + // + // "include": [ + // // additional env for python2.7 + // {"python": "2.7", "numpy": "1.8"}, + // // additional env if run on windows+conda + // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, + // ], + + // The directory (relative to the current directory) that benchmarks are + // stored in. If not provided, defaults to "benchmarks" + "benchmark_dir": "benchmark", + + + // The directory (relative to the current directory) to cache the Python + // environments in. If not provided, defaults to "env" + // "env_dir": "env", + "env_dir": ".asv/envs", + + // The directory (relative to the current directory) that raw benchmark + // results are stored in. If not provided, defaults to "results". + // "results_dir": "results", + "results_dir": ".asv/results", + + // The directory (relative to the current directory) that the html tree + // should be written to. If not provided, defaults to "html". + // "html_dir": "html", + "html_dir": ".asv/html" + + // The number of characters to retain in the commit hashes. + // "hash_length": 8, + + // `asv` will cache results of the recent builds in each + // environment, making them faster to install next time. This is + // the number of builds to keep, per environment. + // "build_cache_size": 2, + + // The commits after which the regression search in `asv publish` + // should start looking for regressions. Dictionary whose keys are + // regexps matching to benchmark names, and values corresponding to + // the commit (exclusive) after which to start looking for + // regressions. The default is to start from the first commit + // with results. If the commit is `null`, regression detection is + // skipped for the matching benchmark. + // + // "regressions_first_commits": { + // "some_benchmark": "352cdf", // Consider regressions only after this commit + // "another_benchmark": null, // Skip regression detection altogether + // }, + + // The thresholds for relative change in results, after which `asv + // publish` starts reporting regressions. Dictionary of the same + // form as in ``regressions_first_commits``, with values + // indicating the thresholds. If multiple entries match, the + // maximum is taken. If no entry matches, the default is 5%. 
+ // + // "regressions_thresholds": { + // "some_benchmark": 0.01, // Threshold of 1% + // "another_benchmark": 0.5, // Threshold of 50% + // }, +} diff --git a/test/asv.linux.cuda.conf.json b/test/asv.linux.cuda.conf.json new file mode 100644 index 0000000000..317c8842c7 --- /dev/null +++ b/test/asv.linux.cuda.conf.json @@ -0,0 +1,191 @@ +// To use this configuration for running the benchmarks, we have to run asv like this: +// $ asv --connfig asv.linux.conf.json +{ + // The version of the config file format. Do not change, unless + // you know what you are doing. + "version": 1, + + // The name of the project being benchmarked + "project": "qiskit-aer", + + // The project's homepage + "project_url": "http://qiskit.org/aer", + + // The URL or local path of the source code repository for the + // project being benchmarked + "repo": "../", + + // The Python project's subdirectory in your repo. If missing or + // the empty string, the project is assumed to be located at the root + // of the repository. + // "repo_subdir": "", + + // Customizable commands for building, installing, and + // uninstalling the project. See asv.conf.json documentation. + // + // "install_command": ["python -mpip install {wheel_file}"], + // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], + // "build_command": [ + // "python setup.py build", + // "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}" + // ], + + "install_command": [ + "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit', True)\"", + "python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_aer.egg-info', True)\"", + "pip install git+https://github.com/Qiskit/qiskit", + "pip install git+https://github.com/Qiskit/qiskit-aqua", + "python -mpip install {wheel_file}" + ], + "uninstall_command": [ + "return-code=any python -mpip uninstall -y qiskit", + "return-code=any python -mpip uninstall -y qiskit-aqua", + "return-code=any python -mpip uninstall -y {project}" + ], + "build_command": [ + "python -mpip install -U scikit-build", + "pip install git+https://github.com/Qiskit/qiskit", + "pip install git+https://github.com/Qiskit/qiskit-aqua", + "pip install pyscf", + "pip install matplotlib", + "python setup.py bdist_wheel --dist-dir={build_cache_dir} -- -DCMAKE_CXX_COMPILER=g++ -DAER_THRUST_BACKEND=CUDA -- -j" + ], + + // List of branches to benchmark. If not provided, defaults to "main" + // (for git) or "default" (for mercurial). + // "branches": ["main"], // for git + // "branches": ["default"], // for mercurial + + // The DVCS being used. If not set, it will be automatically + // determined from "repo" by looking at the protocol in the URL + // (if remote), or by looking for special directories, such as + // ".git" (if local). + // "dvcs": "git", + + // The tool to use to create environments. May be "conda", + // "virtualenv" or other value depending on the plugins in use. + // If missing or the empty string, the tool will be automatically + // determined by looking for tools on the PATH environment + // variable. + "environment_type": "conda", + + // timeout in seconds for installing any dependencies in environment + // defaults to 10 min + //"install_timeout": 600, + + // the base URL to show a commit for the project. + // "show_commit_url": "http://github.com/owner/project/commit/", + + // The Pythons you'd like to test against. If not provided, defaults + // to the current version of Python used to run `asv`. 
+ // "pythons": ["2.7", "3.6"], + + // The list of conda channel names to be searched for benchmark + // dependency packages in the specified order + // "conda_channels": ["conda-forge", "defaults"] + + // The matrix of dependencies to test. Each key is the name of a + // package (in PyPI) and the values are version numbers. An empty + // list or empty string indicates to just test against the default + // (latest) version. null indicates that the package is to not be + // installed. If the package to be tested is only available from + // PyPi, and the 'environment_type' is conda, then you can preface + // the package name by 'pip+', and the package will be installed via + // pip (with all the conda available packages installed first, + // followed by the pip installed packages). + // + // "matrix": { + // "numpy": ["1.6", "1.7"], + // "six": ["", null], // test with and without six installed + // "pip+emcee": [""], // emcee is only available for install with pip. + // }, + + //"matrix": { + // "pip+qiskit": [""], + //}, + + // Combinations of libraries/python versions can be excluded/included + // from the set to test. Each entry is a dictionary containing additional + // key-value pairs to include/exclude. + // + // An exclude entry excludes entries where all values match. The + // values are regexps that should match the whole string. + // + // An include entry adds an environment. Only the packages listed + // are installed. The 'python' key is required. The exclude rules + // do not apply to includes. + // + // In addition to package names, the following keys are available: + // + // - python + // Python version, as in the *pythons* variable above. + // - environment_type + // Environment type, as above. + // - sys_platform + // Platform, as in sys.platform. Possible values for the common + // cases: 'linux2', 'win32', 'cygwin', 'darwin'. + // + // "exclude": [ + // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows + // {"environment_type": "conda", "six": null}, // don't run without six on conda + // ], + // + // "include": [ + // // additional env for python2.7 + // {"python": "2.7", "numpy": "1.8"}, + // // additional env if run on windows+conda + // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, + // ], + + // The directory (relative to the current directory) that benchmarks are + // stored in. If not provided, defaults to "benchmarks" + "benchmark_dir": "benchmark", + + + // The directory (relative to the current directory) to cache the Python + // environments in. If not provided, defaults to "env" + // "env_dir": "env", + "env_dir": ".asv/envs", + + // The directory (relative to the current directory) that raw benchmark + // results are stored in. If not provided, defaults to "results". + // "results_dir": "results", + "results_dir": ".asv/results", + + // The directory (relative to the current directory) that the html tree + // should be written to. If not provided, defaults to "html". + // "html_dir": "html", + "html_dir": ".asv/html" + + // The number of characters to retain in the commit hashes. + // "hash_length": 8, + + // `asv` will cache results of the recent builds in each + // environment, making them faster to install next time. This is + // the number of builds to keep, per environment. + // "build_cache_size": 2, + + // The commits after which the regression search in `asv publish` + // should start looking for regressions. 
Dictionary whose keys are + // regexps matching to benchmark names, and values corresponding to + // the commit (exclusive) after which to start looking for + // regressions. The default is to start from the first commit + // with results. If the commit is `null`, regression detection is + // skipped for the matching benchmark. + // + // "regressions_first_commits": { + // "some_benchmark": "352cdf", // Consider regressions only after this commit + // "another_benchmark": null, // Skip regression detection altogether + // }, + + // The thresholds for relative change in results, after which `asv + // publish` starts reporting regressions. Dictionary of the same + // form as in ``regressions_first_commits``, with values + // indicating the thresholds. If multiple entries match, the + // maximum is taken. If no entry matches, the default is 5%. + // + // "regressions_thresholds": { + // "some_benchmark": 0.01, // Threshold of 1% + // "another_benchmark": 0.5, // Threshold of 50% + // }, +} From 3b5e3eff2618cbadce885f1d5f1ebb0fb8bfcf62 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Fri, 5 Jul 2024 14:30:46 +0900 Subject: [PATCH 06/13] remove AerJobSet --- test/terra/backends/aer_simulator/test_executors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/terra/backends/aer_simulator/test_executors.py b/test/terra/backends/aer_simulator/test_executors.py index 28fc99d57c..42231afe1b 100644 --- a/test/terra/backends/aer_simulator/test_executors.py +++ b/test/terra/backends/aer_simulator/test_executors.py @@ -26,7 +26,7 @@ from qiskit.quantum_info import Statevector from qiskit_aer.noise.noise_model import AerJSONEncoder from test.terra.reference import ref_kraus_noise -from qiskit_aer.jobs import AerJob, AerJobSet +from qiskit_aer.jobs import AerJob from test.terra.backends.simulator_test_case import SimulatorTestCase, supported_methods From a2f1a0ac0c392a97be6dc4fb2bd69714b6ddb63c Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Fri, 5 Jul 2024 16:31:21 +0900 Subject: [PATCH 07/13] remove assemble --- qiskit_aer/library/save_instructions/save_data.py | 9 --------- qiskit_aer/noise/errors/quantum_error.py | 13 ++++++++++--- test/terra/backends/simulator_test_case.py | 2 -- test/terra/backends/test_parameterized_circuit.py | 2 +- .../terra/backends/test_runtime_parameterization.py | 2 +- tools/verify_wheels.py | 2 +- 6 files changed, 13 insertions(+), 17 deletions(-) diff --git a/qiskit_aer/library/save_instructions/save_data.py b/qiskit_aer/library/save_instructions/save_data.py index 7a1b6b77b9..136d17a993 100644 --- a/qiskit_aer/library/save_instructions/save_data.py +++ b/qiskit_aer/library/save_instructions/save_data.py @@ -58,15 +58,6 @@ def __init__(self, name, num_qubits, label, subtype="single", params=None): self._label = label self._subtype = subtype - def assemble(self): - """Return the QasmQobjInstruction for the intructions.""" - instr = super().assemble() - # Use same fields as Snapshot instruction - # so we dont need to modify QasmQobjInstruction - instr.snapshot_type = self._subtype - instr.label = self._label - return instr - def inverse(self, annotated=False): """Special case. 
Return self.""" return copy.copy(self) diff --git a/qiskit_aer/noise/errors/quantum_error.py b/qiskit_aer/noise/errors/quantum_error.py index 9d3d72e1fb..fac98c0bd7 100644 --- a/qiskit_aer/noise/errors/quantum_error.py +++ b/qiskit_aer/noise/errors/quantum_error.py @@ -312,9 +312,16 @@ def to_dict(self): for circ in self._circs: circ_inst = [] for inst in circ.data: - qobj_inst = inst.operation.assemble() - qobj_inst.qubits = [circ.find_bit(q).index for q in inst.qubits] - circ_inst.append(qobj_inst.to_dict()) + inst_dict = {} + inst_dict["name"] = inst.operation.name + inst_dict["qubits"] = [circ.find_bit(q).index for q in inst.qubits] + if inst.operation.params: + inst_dict["params"] = inst.operation.params + if inst.operation.label: + inst_dict["label"] = inst.operation.label + if inst.operation.condition: + inst_dict["condition"] = inst.operation.condition + circ_inst.append(inst_dict) instructions.append(circ_inst) # Construct error dict error = { diff --git a/test/terra/backends/simulator_test_case.py b/test/terra/backends/simulator_test_case.py index c075ff0140..f83a5b2aef 100644 --- a/test/terra/backends/simulator_test_case.py +++ b/test/terra/backends/simulator_test_case.py @@ -19,8 +19,6 @@ from qiskit_aer import AerSimulator from test.terra.common import QiskitAerTestCase from qiskit.circuit import QuantumCircuit -from qiskit.compiler import assemble - class SimulatorTestCase(QiskitAerTestCase): """Simulator test class""" diff --git a/test/terra/backends/test_parameterized_circuit.py b/test/terra/backends/test_parameterized_circuit.py index 9a9290f202..d087ef79ac 100644 --- a/test/terra/backends/test_parameterized_circuit.py +++ b/test/terra/backends/test_parameterized_circuit.py @@ -20,7 +20,7 @@ from test.terra import common -from qiskit.compiler import assemble, transpile +from qiskit.compiler import transpile from qiskit.circuit import QuantumCircuit, Parameter from test.terra.reference.ref_save_expval import ( save_expval_circuits, diff --git a/test/terra/backends/test_runtime_parameterization.py b/test/terra/backends/test_runtime_parameterization.py index 3a0ba3da3c..e1940fe727 100644 --- a/test/terra/backends/test_runtime_parameterization.py +++ b/test/terra/backends/test_runtime_parameterization.py @@ -22,7 +22,7 @@ from test.terra import common -from qiskit.compiler import assemble, transpile +from qiskit.compiler import transpile from qiskit.circuit import QuantumCircuit, Parameter from test.terra.reference.ref_save_expval import ( save_expval_circuits, diff --git a/tools/verify_wheels.py b/tools/verify_wheels.py index 7a1004b8bb..8b66788f5b 100644 --- a/tools/verify_wheels.py +++ b/tools/verify_wheels.py @@ -8,7 +8,7 @@ import numpy as np from qiskit import ClassicalRegister -from qiskit.compiler import assemble, transpile +from qiskit.compiler import transpile from qiskit import QuantumCircuit from qiskit import QuantumRegister from qiskit.quantum_info import Operator, Statevector From f568e716467bc00b80be919810314982a164a7f3 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Fri, 5 Jul 2024 16:38:07 +0900 Subject: [PATCH 08/13] format --- test/terra/backends/simulator_test_case.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/terra/backends/simulator_test_case.py b/test/terra/backends/simulator_test_case.py index f83a5b2aef..e04ec8dcbc 100644 --- a/test/terra/backends/simulator_test_case.py +++ b/test/terra/backends/simulator_test_case.py @@ -20,6 +20,7 @@ from test.terra.common import QiskitAerTestCase from qiskit.circuit import QuantumCircuit + class 
SimulatorTestCase(QiskitAerTestCase): """Simulator test class""" From e74a9391a5fb5b2a4651989e3cfc674c763cfb74 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Thu, 15 Aug 2024 13:05:53 +0900 Subject: [PATCH 09/13] resolve conflict --- qiskit_aer/backends/unitary_simulator.py | 40 ------------------------ 1 file changed, 40 deletions(-) diff --git a/qiskit_aer/backends/unitary_simulator.py b/qiskit_aer/backends/unitary_simulator.py index 807415aac3..afd765c304 100644 --- a/qiskit_aer/backends/unitary_simulator.py +++ b/qiskit_aer/backends/unitary_simulator.py @@ -314,43 +314,3 @@ def _execute_circuits(self, aer_circuits, noise_model, config): config = map_legacy_method_config(config) aer_circuits = add_final_save_op(aer_circuits, "unitary") return cpp_execute_circuits(self._controller, aer_circuits, noise_model, config) -<<<<<<< HEAD -======= - - def _validate(self, qobj): - """Semantic validations of the qobj which cannot be done via schemas. - Some of these may later move to backend schemas. - 1. Set shots=1 - 2. No measurements or reset - 3. Check number of qubits will fit in local memory. - """ - name = self.name - if getattr(qobj.config, "noise_model", None) is not None: - raise AerError(f"{name} does not support noise.") - - n_qubits = qobj.config.n_qubits - max_qubits = self.configuration()["n_qubits"] - if n_qubits > max_qubits: - raise AerError( - f"Number of qubits ({n_qubits}) is greater than " - f'max ({max_qubits}) for "{name}" with ' - f"{int(psutil.virtual_memory().total / (1024**3))} GB system memory." - ) - if qobj.config.shots != 1: - logger.info('"%s" only supports 1 shot. Setting shots=1.', name) - qobj.config.shots = 1 - for experiment in qobj.experiments: - exp_name = experiment.header.name - if getattr(experiment.config, "shots", 1) != 1: - logger.info( - '"%s" only supports 1 shot. 
Setting shots=1 for circuit "%s".', - name, - exp_name, - ) - experiment.config.shots = 1 - for operation in experiment.instructions: - if operation.name in ["measure", "reset"]: - raise AerError( - f"Unsupported {name} instruction {operation.name} in circuit {exp_name}" - ) ->>>>>>> upstream/main From 0eddf1267cbd1842efdbc729d0474f07a9973884 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Thu, 15 Aug 2024 13:14:10 +0900 Subject: [PATCH 10/13] lint --- test/terra/backends/aer_simulator/test_executors.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/terra/backends/aer_simulator/test_executors.py b/test/terra/backends/aer_simulator/test_executors.py index f8c7e0c095..6f5f2fa336 100644 --- a/test/terra/backends/aer_simulator/test_executors.py +++ b/test/terra/backends/aer_simulator/test_executors.py @@ -29,6 +29,7 @@ from qiskit_aer.jobs import AerJob from test.terra.backends.simulator_test_case import SimulatorTestCase, supported_methods + def run_random_circuits(backend, shots=None, **run_options): """Test random circuits on different executor fictures""" job_size = 10 From 9f676c37da5f9dc38c74e526eec9d634df0781d1 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Thu, 15 Aug 2024 13:19:05 +0900 Subject: [PATCH 11/13] lint --- qiskit_aer/backends/aerbackend.py | 1 + 1 file changed, 1 insertion(+) diff --git a/qiskit_aer/backends/aerbackend.py b/qiskit_aer/backends/aerbackend.py index 47edf4bb46..88bfc43284 100644 --- a/qiskit_aer/backends/aerbackend.py +++ b/qiskit_aer/backends/aerbackend.py @@ -18,6 +18,7 @@ import logging import time import uuid +import warnings from abc import ABC, abstractmethod from qiskit.circuit import QuantumCircuit, ParameterExpression, Delay From 80336649e8edfaf0017b46d976dc4d33cb49aab0 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Thu, 15 Aug 2024 13:27:34 +0900 Subject: [PATCH 12/13] remove dask ref in release note --- releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml b/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml index 6329e6b2bc..6a2ab59bb1 100644 --- a/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml +++ b/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml @@ -26,5 +26,5 @@ features: `Dask `__ distributed Client executors if the optional dask library is installed. Using a Dask executor allows configuring parallel execution of multiple circuits on HPC clusters. See the - Dask executor :ref:`API Documentation ` for additional details + Dask executor :ref:`API Documentation dask` for additional details on using Dask executors for HPC simulation. From f9577108428310dd1ebef538120e18f0d9b4b948 Mon Sep 17 00:00:00 2001 From: Jun Doi Date: Thu, 15 Aug 2024 13:40:18 +0900 Subject: [PATCH 13/13] remove dask ref in release note --- releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml b/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml index 6a2ab59bb1..1efc1caa29 100644 --- a/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml +++ b/releasenotes/notes/0.9/custom-executor-3d0048b4dd4fa722.yaml @@ -23,8 +23,8 @@ features: Supported executors include those in the Python ``concurrent.futures`` `module `__ (eg. 
``ThreadPoolExecutor``, ``ProcessPoolExecutor``), and - `Dask `__ distributed Client executors if the optional + Dask __ distributed Client executors if the optional dask library is installed. Using a Dask executor allows configuring parallel execution of multiple circuits on HPC clusters. See the - Dask executor :ref:`API Documentation dask` for additional details + Dask executor API Documentation dask for additional details on using Dask executors for HPC simulation.
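
With the qobj entry point gone, circuits are passed straight to AerSimulator.run(); a minimal sketch of the replacement flow (the circuit, shot count, and printed output here are illustrative, not part of the patches):

    from qiskit import QuantumCircuit, transpile
    from qiskit_aer import AerSimulator

    # Build a small measured circuit.
    bell = QuantumCircuit(2)
    bell.h(0)
    bell.cx(0, 1)
    bell.measure_all()

    backend = AerSimulator()
    # run() takes QuantumCircuit objects directly; there is no assemble()/qobj step.
    result = backend.run(transpile(bell, backend), shots=1000).result()
    print(result.get_counts(0))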
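
Runtime parameter binding replaces the removed parameterizations field of assemble(); a short sketch using the parameter_binds argument of run(), with theta as an illustrative parameter:

    import numpy as np
    from qiskit.circuit import Parameter, QuantumCircuit
    from qiskit_aer import AerSimulator

    theta = Parameter("theta")
    circ = QuantumCircuit(1)
    circ.rx(theta, 0)
    circ.measure_all()

    backend = AerSimulator()
    # One binds dict per circuit; each parameter maps to the list of values to bind.
    job = backend.run([circ], shots=1000, parameter_binds=[{theta: [0.0, np.pi / 2, np.pi]}])
    # Three bound circuits are executed, so get_counts() returns three dicts.
    print(job.result().get_counts())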
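
The reworked QuantumError.to_dict() no longer relies on Instruction.assemble(); each instruction is now serialized as a plain dict with "name" and "qubits", plus "params", "label", and "condition" only when they are set. A quick way to inspect that form (the pauli_error probabilities are illustrative):

    from qiskit_aer.noise import pauli_error

    # 10% bit-flip error on one qubit; print the serialized instruction dicts.
    error = pauli_error([("X", 0.1), ("I", 0.9)])
    print(error.to_dict()["instructions"])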