# This file is part of the pyMOR project (http://www.pymor.org).
# Copyright Holders: Rene Milk, Stephan Rave, Felix Schindler
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)

from __future__ import absolute_import, division, print_function

import time
from itertools import izip

from pymor.algorithms.basisextension import trivial_basis_extension
from pymor.core import getLogger
from pymor.core.exceptions import ExtensionError

def greedy(discretization, reductor, samples, initial_basis=None, use_estimator=True, error_norm=None,
           extension_algorithm=trivial_basis_extension, target_error=None, max_extensions=None):
    '''Greedy basis generation algorithm.

    This algorithm generates a reduced basis by iteratively adding the
    worst approximated solution snapshot for a given training set to the
    reduced basis. The approximation error is computed either by directly
    comparing the reduced solution to the detailed solution or by using
    an error estimator (`use_estimator == True`). The reduction and basis
    extension steps are performed by calling the methods provided by the
    `reductor` and `extension_algorithm` arguments.

    Parameters
    ----------
    discretization
        The |Discretization| to reduce.
    reductor
        Reductor for reducing the given |Discretization|. This has to be a
        function of the form `reductor(discretization, basis, extends=None)`.
        If your reductor takes more arguments, use, e.g., :func:`functools.partial`.
        The function has to return a tuple
        `(reduced_discretization, reconstructor, reduction_data)`.
        In case the last basis extension was `hierarchic` (see
        `extension_algorithm`), the `extends` argument is set to
        `(last_reduced_discretization, last_reconstructor, last_reduction_data)`,
        which can be used by the reductor to speed up the reduction
        process. For an example see
        :func:`~pymor.reductors.linear.reduce_stationary_affine_linear`;
        a usage sketch of `greedy` itself is given at the end of this docstring.
    samples
        The set of |Parameter| samples on which to perform the greedy search.
    initial_basis
        The initial reduced basis with which the algorithm starts. If `None`,
        an empty basis is used as the initial basis.
    use_estimator
        If `True`, use `reduced_discretization.estimate()` to estimate the
        errors on the sample set. Otherwise a detailed simulation is
        performed to calculate the error.
    error_norm
        If `use_estimator == False`, use this function to calculate the
        norm of the error. If `None`, the Euclidean norm is used.
    extension_algorithm
        The extension algorithm to be used to extend the current reduced
        basis with the maximum-error snapshot. This has to be a function
        of the form `extension_algorithm(old_basis, new_vector)`, which
        returns a tuple `(new_basis, extension_data)`, where
        `extension_data` is a dict at least containing the key
        `hierarchic`. `hierarchic` is set to `True` if `new_basis`
        contains `old_basis` as its first vectors.
    target_error
        If not `None`, stop the algorithm as soon as the maximum (estimated)
        error on the sample set drops to or below this value.
    max_extensions
        If not `None`, stop the algorithm after `max_extensions` extension
        steps.

    Returns
    -------
    Dict with the following fields:

        :basis:                  The reduced basis.
        :reduced_discretization: The reduced |Discretization| obtained for the
                                 computed basis.
        :reconstructor:          Reconstructor for `reduced_discretization`.
        :max_err:                Last estimated maximum error on the sample set.
        :max_err_mu:             The parameter that corresponds to `max_err`.
        :max_errs:               Sequence of maximum errors during the greedy run.
        :max_err_mus:            The parameters corresponding to `max_errs`.
        :extensions:             Number of performed basis extensions.
        :reduction_data:         The reduction data returned by the last `reductor` call.
        :time:                   Duration of the greedy search in seconds.
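
    Example
    -------
    A minimal usage sketch, assuming an already constructed |Discretization| `d`
    which declares a `parameter_space` and to which
    :func:`~pymor.reductors.linear.reduce_stationary_affine_linear` is applicable::

        from pymor.algorithms.greedy import greedy
        from pymor.reductors.linear import reduce_stationary_affine_linear

        # `d` is assumed to be an existing parametric |Discretization| instance
        greedy_data = greedy(d, reduce_stationary_affine_linear,
                             samples=d.parameter_space.sample_uniformly(10),
                             use_estimator=False, max_extensions=10)
        rb = greedy_data['basis']
        rd = greedy_data['reduced_discretization']
        rc = greedy_data['reconstructor']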

    '''

    logger = getLogger('pymor.algorithms.greedy.greedy')
    samples = list(samples)
    logger.info('Started greedy search on {} samples'.format(len(samples)))
    basis = initial_basis

    tic = time.time()
    extensions = 0
    max_errs = []
    max_err_mus = []
    hierarchic = False

    rd, rc, reduction_data = None, None, None
    while True:
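        # One greedy iteration: reduce with the current basis, compute (estimated) errors
        # for all sample parameters, extend the basis with the snapshot of the worst
        # approximated parameter, and check the stopping criteria.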

        logger.info('Reducing ...')
        rd, rc, reduction_data = reductor(discretization, basis) if not hierarchic \
            else reductor(discretization, basis, extends=(rd, rc, reduction_data))

        logger.info('Estimating errors ...')
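        # Errors are either estimated via the reduced discretization or computed exactly
        # from detailed solves, measured in error_norm (l2-norm if no norm is given).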

        if use_estimator:
            errors = [rd.estimate(rd.solve(mu), mu) for mu in samples]
        elif error_norm is not None:
            errors = [error_norm(discretization.solve(mu) - rc.reconstruct(rd.solve(mu))) for mu in samples]
        else:
            errors = [(discretization.solve(mu) - rc.reconstruct(rd.solve(mu))).l2_norm() for mu in samples]

        # most error_norms will return an array of length 1 instead of a number, so we extract the numbers
        # if necessary
        errors = [err[0] if hasattr(err, '__len__') else err for err in errors]

        max_err, max_err_mu = max(((err, mu) for err, mu in izip(errors, samples)), key=lambda t: t[0])
        max_errs.append(max_err)
        max_err_mus.append(max_err_mu)
        logger.info('Maximum error after {} extensions: {} (mu = {})'.format(extensions, max_err, max_err_mu))

        if target_error is not None and max_err <= target_error:
            logger.info('Target error reached: maximal error on snapshots is {} <= {}'.format(max_err, target_error))
            break

        logger.info('Extending with snapshot for mu = {}'.format(max_err_mu))
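        # Solve the detailed problem for the worst approximated parameter and try to extend
        # the basis with the resulting snapshot; the extension algorithm signals failure
        # (e.g. the snapshot adds nothing new to the basis) by raising an ExtensionError.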

        U = discretization.solve(max_err_mu)
        try:
            basis, extension_data = extension_algorithm(basis, U)
        except ExtensionError:
            logger.info('Extension failed. Stopping now.')
            break
        extensions += 1
        if 'hierarchic' not in extension_data:
            logger.warn('Extension algorithm does not report whether the extension was hierarchic. Assuming it was not.')
            hierarchic = False
        else:
            hierarchic = extension_data['hierarchic']

        logger.info('')

        if max_extensions is not None and extensions >= max_extensions:
            logger.info('Maximal number of {} extensions reached.'.format(max_extensions))
            # Reduce once more so that the returned reduced discretization corresponds to the final basis.
            logger.info('Reducing once more ...')
            rd, rc, reduction_data = reductor(discretization, basis) if not hierarchic \
                else reductor(discretization, basis, extends=(rd, rc, reduction_data))
            break

    tictoc = time.time() - tic
    logger.info('Greedy search took {} seconds'.format(tictoc))
    return {'basis': basis, 'reduced_discretization': rd, 'reconstructor': rc, 'max_err': max_err,
            'max_err_mu': max_err_mu, 'max_errs': max_errs, 'max_err_mus': max_err_mus, 'extensions': extensions,
            'time': tictoc, 'reduction_data': reduction_data}